#include <petsc/private/dmpleximpl.h>   /*I "petscdmplex.h" I*/
#include <petsc/private/dmlabelimpl.h>  /*I "petscdmlabel.h" I*/

/*@C
  DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback

  Input Parameters:
+ dm   - The DM object
. user - The user callback, may be NULL (to clear the callback)
- ctx  - context for callback evaluation, may be NULL

  Level: advanced

  Notes:
  The caller of DMPlexGetAdjacency may need to arrange that a large enough array is available for the adjacency.

  Any setting here overrides other configuration of DMPlex adjacency determination.

.seealso: DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexGetAdjacencyUser()
@*/
PetscErrorCode DMPlexSetAdjacencyUser(DM dm, PetscErrorCode (*user)(DM,PetscInt,PetscInt*,PetscInt[],void*), void *ctx)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  mesh->useradjacency    = user;
  mesh->useradjacencyctx = ctx;
  PetscFunctionReturn(0);
}

/*@C
  DMPlexGetAdjacencyUser - Get the user-defined adjacency callback

  Input Parameter:
. dm - The DM object

  Output Parameters:
+ user - The user callback
- ctx  - context for callback evaluation

  Level: advanced

.seealso: DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexSetAdjacencyUser()
@*/
PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM,PetscInt,PetscInt*,PetscInt[],void*), void **ctx)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (user) *user = mesh->useradjacency;
  if (ctx)  *ctx  = mesh->useradjacencyctx;
  PetscFunctionReturn(0);
}

/*@
  DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.

  Input Parameters:
+ dm         - The DM object
- useAnchors - Flag to use the constraints. If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.

  Level: intermediate

.seealso: DMGetAdjacency(), DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
@*/
PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  mesh->useAnchors = useAnchors;
  PetscFunctionReturn(0);
}

/*@
  DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.

  Input Parameter:
. dm - The DM object

  Output Parameter:
. useAnchors - Flag to use the constraints. If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
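  Note:
  For example, if a point q that would otherwise be reported by DMPlexGetAdjacency() is constrained by anchor points a0 and a1, then q is dropped from the adjacency list and a0 and a1 are reported in its place.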
86 87 Level: intermediate 88 89 .seealso: DMPlexSetAdjacencyUseAnchors(), DMSetAdjacency(), DMGetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors() 90 @*/ 91 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors) 92 { 93 DM_Plex *mesh = (DM_Plex *) dm->data; 94 95 PetscFunctionBegin; 96 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 97 PetscValidIntPointer(useAnchors, 2); 98 *useAnchors = mesh->useAnchors; 99 PetscFunctionReturn(0); 100 } 101 102 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[]) 103 { 104 const PetscInt *cone = NULL; 105 PetscInt numAdj = 0, maxAdjSize = *adjSize, coneSize, c; 106 PetscErrorCode ierr; 107 108 PetscFunctionBeginHot; 109 ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr); 110 ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr); 111 for (c = 0; c <= coneSize; ++c) { 112 const PetscInt point = !c ? p : cone[c-1]; 113 const PetscInt *support = NULL; 114 PetscInt supportSize, s, q; 115 116 ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr); 117 ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr); 118 for (s = 0; s < supportSize; ++s) { 119 for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]),0); ++q) { 120 if (support[s] == adj[q]) break; 121 } 122 if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize); 123 } 124 } 125 *adjSize = numAdj; 126 PetscFunctionReturn(0); 127 } 128 129 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[]) 130 { 131 const PetscInt *support = NULL; 132 PetscInt numAdj = 0, maxAdjSize = *adjSize, supportSize, s; 133 PetscErrorCode ierr; 134 135 PetscFunctionBeginHot; 136 ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr); 137 ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr); 138 for (s = 0; s <= supportSize; ++s) { 139 const PetscInt point = !s ? 
p : support[s-1]; 140 const PetscInt *cone = NULL; 141 PetscInt coneSize, c, q; 142 143 ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr); 144 ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr); 145 for (c = 0; c < coneSize; ++c) { 146 for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]),0); ++q) { 147 if (cone[c] == adj[q]) break; 148 } 149 if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize); 150 } 151 } 152 *adjSize = numAdj; 153 PetscFunctionReturn(0); 154 } 155 156 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[]) 157 { 158 PetscInt *star = NULL; 159 PetscInt numAdj = 0, maxAdjSize = *adjSize, starSize, s; 160 PetscErrorCode ierr; 161 162 PetscFunctionBeginHot; 163 ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr); 164 for (s = 0; s < starSize*2; s += 2) { 165 const PetscInt *closure = NULL; 166 PetscInt closureSize, c, q; 167 168 ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr); 169 for (c = 0; c < closureSize*2; c += 2) { 170 for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]),0); ++q) { 171 if (closure[c] == adj[q]) break; 172 } 173 if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize); 174 } 175 ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr); 176 } 177 ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr); 178 *adjSize = numAdj; 179 PetscFunctionReturn(0); 180 } 181 182 PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[]) 183 { 184 static PetscInt asiz = 0; 185 PetscInt maxAnchors = 1; 186 PetscInt aStart = -1, aEnd = -1; 187 PetscInt maxAdjSize; 188 PetscSection aSec = NULL; 189 IS aIS = NULL; 190 const PetscInt *anchors; 191 DM_Plex *mesh = (DM_Plex *)dm->data; 192 PetscErrorCode ierr; 193 194 PetscFunctionBeginHot; 195 if (useAnchors) { 196 ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr); 197 if (aSec) { 198 ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr); 199 maxAnchors = PetscMax(1,maxAnchors); 200 ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr); 201 ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr); 202 } 203 } 204 if (!*adj) { 205 PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd; 206 207 ierr = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr); 208 ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr); 209 ierr = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr); 210 coneSeries = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1; 211 supportSeries = (maxS > 1) ? 
((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1; 212 asiz = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries); 213 asiz *= maxAnchors; 214 asiz = PetscMin(asiz,pEnd-pStart); 215 ierr = PetscMalloc1(asiz,adj);CHKERRQ(ierr); 216 } 217 if (*adjSize < 0) *adjSize = asiz; 218 maxAdjSize = *adjSize; 219 if (mesh->useradjacency) { 220 ierr = (*mesh->useradjacency)(dm, p, adjSize, *adj, mesh->useradjacencyctx);CHKERRQ(ierr); 221 } else if (useTransitiveClosure) { 222 ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr); 223 } else if (useCone) { 224 ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr); 225 } else { 226 ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr); 227 } 228 if (useAnchors && aSec) { 229 PetscInt origSize = *adjSize; 230 PetscInt numAdj = origSize; 231 PetscInt i = 0, j; 232 PetscInt *orig = *adj; 233 234 while (i < origSize) { 235 PetscInt p = orig[i]; 236 PetscInt aDof = 0; 237 238 if (p >= aStart && p < aEnd) { 239 ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr); 240 } 241 if (aDof) { 242 PetscInt aOff; 243 PetscInt s, q; 244 245 for (j = i + 1; j < numAdj; j++) { 246 orig[j - 1] = orig[j]; 247 } 248 origSize--; 249 numAdj--; 250 ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr); 251 for (s = 0; s < aDof; ++s) { 252 for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff+s]),0); ++q) { 253 if (anchors[aOff+s] == orig[q]) break; 254 } 255 if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize); 256 } 257 } 258 else { 259 i++; 260 } 261 } 262 *adjSize = numAdj; 263 ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr); 264 } 265 PetscFunctionReturn(0); 266 } 267 268 /*@ 269 DMPlexGetAdjacency - Return all points adjacent to the given point 270 271 Input Parameters: 272 + dm - The DM object 273 . p - The point 274 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE 275 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize 276 277 Output Parameters: 278 + adjSize - The number of adjacent points 279 - adj - The adjacent points 280 281 Level: advanced 282 283 Notes: 284 The user must PetscFree the adj array if it was not passed in. 
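  A minimal calling sketch (illustrative only); the array is allocated by the routine and must be freed by the caller:

$      PetscInt  adjSize = PETSC_DETERMINE;
$      PetscInt *adj     = NULL;
$      ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
$      ierr = PetscFree(adj);CHKERRQ(ierr);

  On return, adjSize holds the number of adjacent points stored at the start of adj.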
285 286 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator() 287 @*/ 288 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[]) 289 { 290 PetscBool useCone, useClosure, useAnchors; 291 PetscErrorCode ierr; 292 293 PetscFunctionBeginHot; 294 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 295 PetscValidPointer(adjSize,3); 296 PetscValidPointer(adj,4); 297 ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr); 298 ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr); 299 ierr = DMPlexGetAdjacency_Internal(dm, p, useCone, useClosure, useAnchors, adjSize, adj);CHKERRQ(ierr); 300 PetscFunctionReturn(0); 301 } 302 303 /*@ 304 DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity 305 306 Collective on dm 307 308 Input Parameters: 309 + dm - The DM 310 - sfPoint - The PetscSF which encodes point connectivity 311 312 Output Parameters: 313 + processRanks - A list of process neighbors, or NULL 314 - sfProcess - An SF encoding the two-sided process connectivity, or NULL 315 316 Level: developer 317 318 .seealso: PetscSFCreate(), DMPlexCreateProcessSF() 319 @*/ 320 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess) 321 { 322 const PetscSFNode *remotePoints; 323 PetscInt *localPointsNew; 324 PetscSFNode *remotePointsNew; 325 const PetscInt *nranks; 326 PetscInt *ranksNew; 327 PetscBT neighbors; 328 PetscInt pStart, pEnd, p, numLeaves, l, numNeighbors, n; 329 PetscMPIInt size, proc, rank; 330 PetscErrorCode ierr; 331 332 PetscFunctionBegin; 333 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 334 PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2); 335 if (processRanks) {PetscValidPointer(processRanks, 3);} 336 if (sfProcess) {PetscValidPointer(sfProcess, 4);} 337 ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr); 338 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr); 339 ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr); 340 ierr = PetscBTCreate(size, &neighbors);CHKERRQ(ierr); 341 ierr = PetscBTMemzero(size, neighbors);CHKERRQ(ierr); 342 /* Compute root-to-leaf process connectivity */ 343 ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr); 344 ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr); 345 for (p = pStart; p < pEnd; ++p) { 346 PetscInt ndof, noff, n; 347 348 ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr); 349 ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr); 350 for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);} 351 } 352 ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr); 353 /* Compute leaf-to-neighbor process connectivity */ 354 ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr); 355 ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr); 356 for (p = pStart; p < pEnd; ++p) { 357 PetscInt ndof, noff, n; 358 359 ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr); 360 ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr); 361 for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);} 362 } 363 ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr); 364 /* Compute leaf-to-root process connectivity */ 365 for (l = 0; l < numLeaves; ++l) 
{PetscBTSet(neighbors, remotePoints[l].rank);} 366 /* Calculate edges */ 367 PetscBTClear(neighbors, rank); 368 for(proc = 0, numNeighbors = 0; proc < size; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;} 369 ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr); 370 ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr); 371 ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr); 372 for(proc = 0, n = 0; proc < size; ++proc) { 373 if (PetscBTLookup(neighbors, proc)) { 374 ranksNew[n] = proc; 375 localPointsNew[n] = proc; 376 remotePointsNew[n].index = rank; 377 remotePointsNew[n].rank = proc; 378 ++n; 379 } 380 } 381 ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr); 382 if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);} 383 else {ierr = PetscFree(ranksNew);CHKERRQ(ierr);} 384 if (sfProcess) { 385 ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr); 386 ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr); 387 ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr); 388 ierr = PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr); 389 } 390 PetscFunctionReturn(0); 391 } 392 393 /*@ 394 DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF. 395 396 Collective on dm 397 398 Input Parameter: 399 . dm - The DM 400 401 Output Parameters: 402 + rootSection - The number of leaves for a given root point 403 . rootrank - The rank of each edge into the root point 404 . leafSection - The number of processes sharing a given leaf point 405 - leafrank - The rank of each process sharing a leaf point 406 407 Level: developer 408 409 .seealso: DMPlexCreateOverlapLabel(), DMPlexDistribute(), DMPlexDistributeOverlap() 410 @*/ 411 PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank) 412 { 413 MPI_Comm comm; 414 PetscSF sfPoint; 415 const PetscInt *rootdegree; 416 PetscInt *myrank, *remoterank; 417 PetscInt pStart, pEnd, p, nedges; 418 PetscMPIInt rank; 419 PetscErrorCode ierr; 420 421 PetscFunctionBegin; 422 ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr); 423 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 424 ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr); 425 ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr); 426 /* Compute number of leaves for each root */ 427 ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr); 428 ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr); 429 ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr); 430 ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr); 431 for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);} 432 ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr); 433 /* Gather rank of each leaf to root */ 434 ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr); 435 ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr); 436 ierr = PetscMalloc1(nedges, &remoterank);CHKERRQ(ierr); 437 for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank; 438 ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr); 439 ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, 
remoterank);CHKERRQ(ierr);
  ierr = PetscFree(myrank);CHKERRQ(ierr);
  ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
  /* Distribute remote ranks to leaves */
  ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
  ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
  DMPlexCreateOverlapLabel - Compute a DMLabel describing the remote overlap contributions, as point/rank pairs, needed to add the given number of overlap levels, using the ownership information for shared points.

  Collective on dm

  Input Parameters:
+ dm - The DM
. levels - Number of overlap levels
. rootSection - The number of leaves for a given root point
. rootrank - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank - The rank of each process sharing a leaf point

  Output Parameter:
. ovLabel - DMLabel containing remote overlap contributions as point/rank pairings

  Level: developer

.seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
@*/
PetscErrorCode DMPlexCreateOverlapLabel(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
{
  MPI_Comm           comm;
  DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
  PetscSF            sfPoint;
  const PetscSFNode *remote;
  const PetscInt    *local;
  const PetscInt    *nrank, *rrank;
  PetscInt          *adj = NULL;
  PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
  PetscMPIInt        rank, size;
  PetscBool          flg;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  *ovLabel = NULL;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  if (size == 1) PetscFunctionReturn(0);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  ierr = DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
  /* Handle leaves: shared with the root point */
  for (l = 0; l < nleaves; ++l) {
    PetscInt adjSize = PETSC_DETERMINE, a;

    ierr = DMPlexGetAdjacency(dm, local ?
local[l] : l, &adjSize, &adj);CHKERRQ(ierr); 498 for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);} 499 } 500 ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr); 501 ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr); 502 /* Handle roots */ 503 for (p = pStart; p < pEnd; ++p) { 504 PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a; 505 506 if ((p >= sStart) && (p < sEnd)) { 507 /* Some leaves share a root with other leaves on different processes */ 508 ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr); 509 if (neighbors) { 510 ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr); 511 ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr); 512 for (n = 0; n < neighbors; ++n) { 513 const PetscInt remoteRank = nrank[noff+n]; 514 515 if (remoteRank == rank) continue; 516 for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);} 517 } 518 } 519 } 520 /* Roots are shared with leaves */ 521 ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr); 522 if (!neighbors) continue; 523 ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr); 524 ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr); 525 for (n = 0; n < neighbors; ++n) { 526 const PetscInt remoteRank = rrank[noff+n]; 527 528 if (remoteRank == rank) continue; 529 for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);} 530 } 531 } 532 ierr = PetscFree(adj);CHKERRQ(ierr); 533 ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr); 534 ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr); 535 /* Add additional overlap levels */ 536 for (l = 1; l < levels; l++) { 537 /* Propagate point donations over SF to capture remote connections */ 538 ierr = DMPlexPartitionLabelPropagate(dm, ovAdjByRank);CHKERRQ(ierr); 539 /* Add next level of point donations to the label */ 540 ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr); 541 } 542 /* We require the closure in the overlap */ 543 ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr); 544 ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr); 545 if (flg) { 546 PetscViewer viewer; 547 ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer);CHKERRQ(ierr); 548 ierr = DMLabelView(ovAdjByRank, viewer);CHKERRQ(ierr); 549 } 550 /* Invert sender to receiver label */ 551 ierr = DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel);CHKERRQ(ierr); 552 ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL, *ovLabel);CHKERRQ(ierr); 553 /* Add owned points, except for shared local points */ 554 for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);} 555 for (l = 0; l < nleaves; ++l) { 556 ierr = DMLabelClearValue(*ovLabel, local[l], rank);CHKERRQ(ierr); 557 ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr); 558 } 559 /* Clean up */ 560 ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr); 561 PetscFunctionReturn(0); 562 } 563 564 /*@C 565 DMPlexCreateOverlapMigrationSF - Create an SF describing the new mesh distribution to make the overlap described by the input SF 566 567 Collective on dm 568 569 Input Parameters: 570 + dm - The DM 571 - overlapSF - The SF mapping ghost points in overlap to owner points on other processes 572 573 Output Parameters: 574 . 
migrationSF - An SF that maps original points in old locations to points in new locations 575 576 Level: developer 577 578 .seealso: DMPlexCreateOverlapLabel(), DMPlexDistributeOverlap(), DMPlexDistribute() 579 @*/ 580 PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF) 581 { 582 MPI_Comm comm; 583 PetscMPIInt rank, size; 584 PetscInt d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints; 585 PetscInt *pointDepths, *remoteDepths, *ilocal; 586 PetscInt *depthRecv, *depthShift, *depthIdx; 587 PetscSFNode *iremote; 588 PetscSF pointSF; 589 const PetscInt *sharedLocal; 590 const PetscSFNode *overlapRemote, *sharedRemote; 591 PetscErrorCode ierr; 592 593 PetscFunctionBegin; 594 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 595 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr); 596 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 597 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 598 ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr); 599 600 /* Before building the migration SF we need to know the new stratum offsets */ 601 ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr); 602 ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr); 603 for (d=0; d<dim+1; d++) { 604 ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr); 605 for (p=pStart; p<pEnd; p++) pointDepths[p] = d; 606 } 607 for (p=0; p<nleaves; p++) remoteDepths[p] = -1; 608 ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr); 609 ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr); 610 611 /* Count recevied points in each stratum and compute the internal strata shift */ 612 ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr); 613 for (d=0; d<dim+1; d++) depthRecv[d]=0; 614 for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++; 615 depthShift[dim] = 0; 616 for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim]; 617 for (d=1; d<dim; d++) depthShift[d] += depthRecv[0]; 618 for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1]; 619 for (d=0; d<dim+1; d++) { 620 ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr); 621 depthIdx[d] = pStart + depthShift[d]; 622 } 623 624 /* Form the overlap SF build an SF that describes the full overlap migration SF */ 625 ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr); 626 newLeaves = pEnd - pStart + nleaves; 627 ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr); 628 ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr); 629 /* First map local points to themselves */ 630 for (d=0; d<dim+1; d++) { 631 ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr); 632 for (p=pStart; p<pEnd; p++) { 633 point = p + depthShift[d]; 634 ilocal[point] = point; 635 iremote[point].index = p; 636 iremote[point].rank = rank; 637 depthIdx[d]++; 638 } 639 } 640 641 /* Add in the remote roots for currently shared points */ 642 ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr); 643 ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr); 644 for (d=0; d<dim+1; d++) { 645 ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr); 646 for (p=0; p<numSharedPoints; p++) { 647 if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) { 648 point = sharedLocal[p] + depthShift[d]; 649 iremote[point].index = sharedRemote[p].index; 650 iremote[point].rank = sharedRemote[p].rank; 651 } 652 } 653 } 654 655 
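  /* At this point ilocal/iremote map every current local point to itself at its
     stratum-shifted position, and points that were already shared have had their
     remote owner written into iremote. depthIdx[d] now points at the first free
     slot of stratum d, where the received overlap points are appended below. */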
/* Now add the incoming overlap points */ 656 for (p=0; p<nleaves; p++) { 657 point = depthIdx[remoteDepths[p]]; 658 ilocal[point] = point; 659 iremote[point].index = overlapRemote[p].index; 660 iremote[point].rank = overlapRemote[p].rank; 661 depthIdx[remoteDepths[p]]++; 662 } 663 ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr); 664 665 ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr); 666 ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr); 667 ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr); 668 ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr); 669 ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr); 670 671 ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr); 672 PetscFunctionReturn(0); 673 } 674 675 /*@ 676 DMPlexStratifyMigrationSF - Rearrange the leaves of a migration sf for stratification. 677 678 Input Parameter: 679 + dm - The DM 680 - sf - A star forest with non-ordered leaves, usually defining a DM point migration 681 682 Output Parameter: 683 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified 684 685 Level: developer 686 687 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap() 688 @*/ 689 PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF) 690 { 691 MPI_Comm comm; 692 PetscMPIInt rank, size; 693 PetscInt d, ldepth, depth, p, pStart, pEnd, nroots, nleaves; 694 PetscInt *pointDepths, *remoteDepths, *ilocal; 695 PetscInt *depthRecv, *depthShift, *depthIdx; 696 PetscInt hybEnd[4]; 697 const PetscSFNode *iremote; 698 PetscErrorCode ierr; 699 700 PetscFunctionBegin; 701 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 702 ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr); 703 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 704 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 705 ierr = DMPlexGetDepth(dm, &ldepth);CHKERRQ(ierr); 706 ierr = MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr); 707 if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", ldepth, depth); 708 ierr = PetscLogEventBegin(DMPLEX_PartStratSF,dm,0,0,0);CHKERRQ(ierr); 709 710 /* Before building the migration SF we need to know the new stratum offsets */ 711 ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);CHKERRQ(ierr); 712 ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr); 713 ierr = DMPlexGetHybridBounds(dm,&hybEnd[depth],&hybEnd[PetscMax(depth-1,0)],&hybEnd[1],&hybEnd[0]);CHKERRQ(ierr); 714 for (d = 0; d < depth+1; ++d) { 715 ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr); 716 for (p = pStart; p < pEnd; ++p) { 717 if (hybEnd[d] >= 0 && p >= hybEnd[d]) { /* put in a separate value for hybrid points */ 718 pointDepths[p] = 2 * d; 719 } else { 720 pointDepths[p] = 2 * d + 1; 721 } 722 } 723 } 724 for (p = 0; p < nleaves; ++p) remoteDepths[p] = -1; 725 ierr = PetscSFBcastBegin(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr); 726 ierr = PetscSFBcastEnd(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr); 727 /* Count received points in each stratum and compute the internal strata shift */ 728 ierr = PetscMalloc3(2*(depth+1), &depthRecv, 2*(depth+1), &depthShift, 2*(depth+1), &depthIdx);CHKERRQ(ierr); 729 for (d = 0; d < 2*(depth+1); ++d) depthRecv[d] = 0; 730 for (p = 0; p 
< nleaves; ++p) depthRecv[remoteDepths[p]]++;
  depthShift[2*depth+1] = 0;
  for (d = 0; d < 2*depth+1; ++d) depthShift[d] = depthRecv[2 * depth + 1];
  for (d = 0; d < 2*depth; ++d) depthShift[d] += depthRecv[2 * depth];
  depthShift[0] += depthRecv[1];
  for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[1];
  for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[0];
  for (d = 2 * depth-1; d > 2; --d) {
    PetscInt e;

    for (e = d -1; e > 1; --e) depthShift[e] += depthRecv[d];
  }
  for (d = 0; d < 2*(depth+1); ++d) {depthIdx[d] = 0;}
  /* Derive a new local permutation based on stratified indices */
  ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr);
  for (p = 0; p < nleaves; ++p) {
    const PetscInt dep = remoteDepths[p];

    ilocal[p] = depthShift[dep] + depthIdx[dep];
    depthIdx[dep]++;
  }
  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr);
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
  ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_PartStratSF,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
  DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

  Collective on dm

  Input Parameters:
+ dm - The DMPlex object
. pointSF - The PetscSF describing the communication pattern
. originalSection - The PetscSection for existing data layout
- originalVec - The existing data in a local vector

  Output Parameters:
+ newSection - The PetscSection describing the new data layout
- newVec - The new data in a local vector

  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
@*/
PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
{
  PetscSF        fieldSF;
  PetscInt      *remoteOffsets, fieldSize;
  PetscScalar   *originalValues, *newValues;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);

  ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
  ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);

  ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
  ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
  ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
  ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
  ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
  DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

  Collective on dm

  Input Parameters:
+ dm - The DMPlex object
. pointSF - The PetscSF describing the communication pattern
. originalSection - The PetscSection for existing data layout
- originalIS - The existing data

  Output Parameters:
+ newSection - The PetscSection describing the new data layout
- newIS - The new data

  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
@*/
PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
{
  PetscSF         fieldSF;
  PetscInt       *newValues, *remoteOffsets, fieldSize;
  const PetscInt *originalValues;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);

  ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
  ierr = PetscMalloc1(fieldSize, &newValues);CHKERRQ(ierr);

  ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
  ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
  ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
  DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

  Collective on dm

  Input Parameters:
+ dm - The DMPlex object
. pointSF - The PetscSF describing the communication pattern
. originalSection - The PetscSection for existing data layout
.
datatype - The type of data 862 - originalData - The existing data 863 864 Output Parameters: 865 + newSection - The PetscSection describing the new data layout 866 - newData - The new data 867 868 Level: developer 869 870 .seealso: DMPlexDistribute(), DMPlexDistributeField() 871 @*/ 872 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData) 873 { 874 PetscSF fieldSF; 875 PetscInt *remoteOffsets, fieldSize; 876 PetscMPIInt dataSize; 877 PetscErrorCode ierr; 878 879 PetscFunctionBegin; 880 ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr); 881 ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr); 882 883 ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr); 884 ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr); 885 ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr); 886 887 ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr); 888 ierr = PetscFree(remoteOffsets);CHKERRQ(ierr); 889 ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr); 890 ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr); 891 ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr); 892 ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr); 893 PetscFunctionReturn(0); 894 } 895 896 static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel) 897 { 898 DM_Plex *pmesh = (DM_Plex*) (dmParallel)->data; 899 MPI_Comm comm; 900 PetscSF coneSF; 901 PetscSection originalConeSection, newConeSection; 902 PetscInt *remoteOffsets, *cones, *globCones, *newCones, newConesSize; 903 PetscBool flg; 904 PetscErrorCode ierr; 905 906 PetscFunctionBegin; 907 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 908 PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5); 909 ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr); 910 /* Distribute cone section */ 911 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr); 912 ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr); 913 ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr); 914 ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr); 915 ierr = DMSetUp(dmParallel);CHKERRQ(ierr); 916 { 917 PetscInt pStart, pEnd, p; 918 919 ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr); 920 for (p = pStart; p < pEnd; ++p) { 921 PetscInt coneSize; 922 ierr = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr); 923 pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize); 924 } 925 } 926 /* Communicate and renumber cones */ 927 ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr); 928 ierr = PetscFree(remoteOffsets);CHKERRQ(ierr); 929 ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr); 930 if (original) { 931 PetscInt numCones; 932 933 ierr = PetscSectionGetStorageSize(originalConeSection,&numCones);CHKERRQ(ierr); 934 ierr = PetscMalloc1(numCones,&globCones);CHKERRQ(ierr); 935 ierr = ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones);CHKERRQ(ierr); 936 } else { 937 globCones = cones; 938 } 939 ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr); 
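  /* Broadcast the cone points to their new owners; ISGlobalToLocalMappingApplyBlock()
     below translates them back into the parallel DM's local point numbering. */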
940 ierr = PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr); 941 ierr = PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr); 942 if (original) { 943 ierr = PetscFree(globCones);CHKERRQ(ierr); 944 } 945 ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr); 946 ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr); 947 #if defined(PETSC_USE_DEBUG) 948 { 949 PetscInt p; 950 PetscBool valid = PETSC_TRUE; 951 for (p = 0; p < newConesSize; ++p) { 952 if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "[%d] Point %D not in overlap SF\n", PetscGlobalRank,p);CHKERRQ(ierr);} 953 } 954 if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map"); 955 } 956 #endif 957 ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr); 958 if (flg) { 959 ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr); 960 ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr); 961 ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr); 962 ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr); 963 ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr); 964 } 965 ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr); 966 ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr); 967 ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr); 968 ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr); 969 ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr); 970 ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr); 971 /* Create supports and stratify DMPlex */ 972 { 973 PetscInt pStart, pEnd; 974 975 ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr); 976 ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr); 977 } 978 ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr); 979 ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr); 980 { 981 PetscBool useCone, useClosure, useAnchors; 982 983 ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr); 984 ierr = DMSetBasicAdjacency(dmParallel, useCone, useClosure);CHKERRQ(ierr); 985 ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr); 986 ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr); 987 } 988 PetscFunctionReturn(0); 989 } 990 991 static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel) 992 { 993 MPI_Comm comm; 994 PetscSection originalCoordSection, newCoordSection; 995 Vec originalCoordinates, newCoordinates; 996 PetscInt bs; 997 PetscBool isper; 998 const char *name; 999 const PetscReal *maxCell, *L; 1000 const DMBoundaryType *bd; 1001 PetscErrorCode ierr; 1002 1003 PetscFunctionBegin; 1004 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1005 PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3); 1006 1007 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr); 1008 ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr); 1009 ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr); 1010 ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr); 1011 if (originalCoordinates) { 1012 ierr = VecCreate(PETSC_COMM_SELF, &newCoordinates);CHKERRQ(ierr); 1013 ierr = PetscObjectGetName((PetscObject) originalCoordinates, 
&name);CHKERRQ(ierr); 1014 ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr); 1015 1016 ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr); 1017 ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr); 1018 ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr); 1019 ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr); 1020 ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr); 1021 } 1022 ierr = DMGetPeriodicity(dm, &isper, &maxCell, &L, &bd);CHKERRQ(ierr); 1023 ierr = DMSetPeriodicity(dmParallel, isper, maxCell, L, bd);CHKERRQ(ierr); 1024 PetscFunctionReturn(0); 1025 } 1026 1027 static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel) 1028 { 1029 DM_Plex *mesh = (DM_Plex*) dm->data; 1030 MPI_Comm comm; 1031 DMLabel depthLabel; 1032 PetscMPIInt rank; 1033 PetscInt depth, d, numLabels, numLocalLabels, l; 1034 PetscBool hasLabels = PETSC_FALSE, lsendDepth, sendDepth; 1035 PetscObjectState depthState = -1; 1036 PetscErrorCode ierr; 1037 1038 PetscFunctionBegin; 1039 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1040 PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3); 1041 1042 ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr); 1043 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr); 1044 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 1045 1046 /* If the user has changed the depth label, communicate it instead */ 1047 ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr); 1048 ierr = DMPlexGetDepthLabel(dm, &depthLabel);CHKERRQ(ierr); 1049 if (depthLabel) {ierr = PetscObjectStateGet((PetscObject) depthLabel, &depthState);CHKERRQ(ierr);} 1050 lsendDepth = mesh->depthState != depthState ? 
PETSC_TRUE : PETSC_FALSE; 1051 ierr = MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr); 1052 if (sendDepth) { 1053 ierr = DMPlexGetDepthLabel(dmParallel, &dmParallel->depthLabel);CHKERRQ(ierr); 1054 ierr = DMRemoveLabelBySelf(dmParallel, &dmParallel->depthLabel, PETSC_FALSE);CHKERRQ(ierr); 1055 } 1056 /* Everyone must have either the same number of labels, or none */ 1057 ierr = DMGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr); 1058 numLabels = numLocalLabels; 1059 ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr); 1060 if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE; 1061 for (l = numLabels-1; l >= 0; --l) { 1062 DMLabel label = NULL, labelNew = NULL; 1063 PetscBool isDepth, lisOutput = PETSC_TRUE, isOutput; 1064 const char *name = NULL; 1065 1066 if (hasLabels) { 1067 ierr = DMGetLabelByNum(dm, l, &label);CHKERRQ(ierr); 1068 /* Skip "depth" because it is recreated */ 1069 ierr = PetscObjectGetName((PetscObject) label, &name);CHKERRQ(ierr); 1070 ierr = PetscStrcmp(name, "depth", &isDepth);CHKERRQ(ierr); 1071 } 1072 ierr = MPI_Bcast(&isDepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr); 1073 if (isDepth && !sendDepth) continue; 1074 ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr); 1075 if (isDepth) { 1076 /* Put in any missing strata which can occur if users are managing the depth label themselves */ 1077 PetscInt gdepth; 1078 1079 ierr = MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr); 1080 if ((depth >= 0) && (gdepth != depth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", depth, gdepth); 1081 for (d = 0; d <= gdepth; ++d) { 1082 PetscBool has; 1083 1084 ierr = DMLabelHasStratum(labelNew, d, &has);CHKERRQ(ierr); 1085 if (!has) {ierr = DMLabelAddStratum(labelNew, d);CHKERRQ(ierr);} 1086 } 1087 } 1088 ierr = DMAddLabel(dmParallel, labelNew);CHKERRQ(ierr); 1089 /* Put the output flag in the new label */ 1090 if (hasLabels) {ierr = DMGetLabelOutput(dm, name, &lisOutput);CHKERRQ(ierr);} 1091 ierr = MPIU_Allreduce(&lisOutput, &isOutput, 1, MPIU_BOOL, MPI_LAND, comm);CHKERRQ(ierr); 1092 ierr = PetscObjectGetName((PetscObject) labelNew, &name);CHKERRQ(ierr); 1093 ierr = DMSetLabelOutput(dmParallel, name, isOutput);CHKERRQ(ierr); 1094 ierr = DMLabelDestroy(&labelNew);CHKERRQ(ierr); 1095 } 1096 ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr); 1097 PetscFunctionReturn(0); 1098 } 1099 1100 static PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel) 1101 { 1102 DM_Plex *mesh = (DM_Plex*) dm->data; 1103 DM_Plex *pmesh = (DM_Plex*) (dmParallel)->data; 1104 PetscBool *isHybrid, *isHybridParallel; 1105 PetscInt dim, depth, d; 1106 PetscInt pStart, pEnd, pStartP, pEndP; 1107 PetscErrorCode ierr; 1108 1109 PetscFunctionBegin; 1110 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1111 PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3); 1112 1113 ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr); 1114 ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr); 1115 ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr); 1116 ierr = DMPlexGetChart(dmParallel,&pStartP,&pEndP);CHKERRQ(ierr); 1117 ierr = PetscCalloc2(pEnd-pStart,&isHybrid,pEndP-pStartP,&isHybridParallel);CHKERRQ(ierr); 1118 for (d = 0; d <= depth; d++) { 1119 PetscInt hybridMax = (depth == 1 && d == 1) ? 
mesh->hybridPointMax[dim] : mesh->hybridPointMax[d]; 1120 1121 if (hybridMax >= 0) { 1122 PetscInt sStart, sEnd, p; 1123 1124 ierr = DMPlexGetDepthStratum(dm,d,&sStart,&sEnd);CHKERRQ(ierr); 1125 for (p = hybridMax; p < sEnd; p++) isHybrid[p-pStart] = PETSC_TRUE; 1126 } 1127 } 1128 ierr = PetscSFBcastBegin(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr); 1129 ierr = PetscSFBcastEnd(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr); 1130 for (d = 0; d <= dim; d++) pmesh->hybridPointMax[d] = -1; 1131 for (d = 0; d <= depth; d++) { 1132 PetscInt sStart, sEnd, p, dd; 1133 1134 ierr = DMPlexGetDepthStratum(dmParallel,d,&sStart,&sEnd);CHKERRQ(ierr); 1135 dd = (depth == 1 && d == 1) ? dim : d; 1136 for (p = sStart; p < sEnd; p++) { 1137 if (isHybridParallel[p-pStartP]) { 1138 pmesh->hybridPointMax[dd] = p; 1139 break; 1140 } 1141 } 1142 } 1143 ierr = PetscFree2(isHybrid,isHybridParallel);CHKERRQ(ierr); 1144 PetscFunctionReturn(0); 1145 } 1146 1147 static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel) 1148 { 1149 DM_Plex *mesh = (DM_Plex*) dm->data; 1150 DM_Plex *pmesh = (DM_Plex*) (dmParallel)->data; 1151 MPI_Comm comm; 1152 DM refTree; 1153 PetscSection origParentSection, newParentSection; 1154 PetscInt *origParents, *origChildIDs; 1155 PetscBool flg; 1156 PetscErrorCode ierr; 1157 1158 PetscFunctionBegin; 1159 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1160 PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5); 1161 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr); 1162 1163 /* Set up tree */ 1164 ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr); 1165 ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr); 1166 ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr); 1167 if (origParentSection) { 1168 PetscInt pStart, pEnd; 1169 PetscInt *newParents, *newChildIDs, *globParents; 1170 PetscInt *remoteOffsetsParents, newParentSize; 1171 PetscSF parentSF; 1172 1173 ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr); 1174 ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr); 1175 ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr); 1176 ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr); 1177 ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr); 1178 ierr = PetscFree(remoteOffsetsParents);CHKERRQ(ierr); 1179 ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr); 1180 ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr); 1181 if (original) { 1182 PetscInt numParents; 1183 1184 ierr = PetscSectionGetStorageSize(origParentSection,&numParents);CHKERRQ(ierr); 1185 ierr = PetscMalloc1(numParents,&globParents);CHKERRQ(ierr); 1186 ierr = ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents);CHKERRQ(ierr); 1187 } 1188 else { 1189 globParents = origParents; 1190 } 1191 ierr = PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr); 1192 ierr = PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr); 1193 if (original) { 1194 ierr = PetscFree(globParents);CHKERRQ(ierr); 1195 } 1196 ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, 
newChildIDs);CHKERRQ(ierr); 1197 ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr); 1198 ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr); 1199 #if defined(PETSC_USE_DEBUG) 1200 { 1201 PetscInt p; 1202 PetscBool valid = PETSC_TRUE; 1203 for (p = 0; p < newParentSize; ++p) { 1204 if (newParents[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);} 1205 } 1206 if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map"); 1207 } 1208 #endif 1209 ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr); 1210 if (flg) { 1211 ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr); 1212 ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr); 1213 ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr); 1214 ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr); 1215 ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr); 1216 } 1217 ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr); 1218 ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr); 1219 ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr); 1220 ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr); 1221 } 1222 pmesh->useAnchors = mesh->useAnchors; 1223 PetscFunctionReturn(0); 1224 } 1225 1226 PETSC_UNUSED static PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel) 1227 { 1228 PetscMPIInt rank, size; 1229 MPI_Comm comm; 1230 PetscErrorCode ierr; 1231 1232 PetscFunctionBegin; 1233 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1234 PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3); 1235 1236 /* Create point SF for parallel mesh */ 1237 ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr); 1238 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr); 1239 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 1240 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 1241 { 1242 const PetscInt *leaves; 1243 PetscSFNode *remotePoints, *rowners, *lowners; 1244 PetscInt numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints; 1245 PetscInt pStart, pEnd; 1246 1247 ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr); 1248 ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr); 1249 ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr); 1250 for (p=0; p<numRoots; p++) { 1251 rowners[p].rank = -1; 1252 rowners[p].index = -1; 1253 } 1254 ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr); 1255 ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr); 1256 for (p = 0; p < numLeaves; ++p) { 1257 if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */ 1258 lowners[p].rank = rank; 1259 lowners[p].index = leaves ? 
leaves[p] : p; 1260 } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */ 1261 lowners[p].rank = -2; 1262 lowners[p].index = -2; 1263 } 1264 } 1265 for (p=0; p<numRoots; p++) { /* Root must not participate in the rediction, flag so that MAXLOC does not use */ 1266 rowners[p].rank = -3; 1267 rowners[p].index = -3; 1268 } 1269 ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr); 1270 ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr); 1271 ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr); 1272 ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr); 1273 for (p = 0; p < numLeaves; ++p) { 1274 if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed"); 1275 if (lowners[p].rank != rank) ++numGhostPoints; 1276 } 1277 ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr); 1278 ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr); 1279 for (p = 0, gp = 0; p < numLeaves; ++p) { 1280 if (lowners[p].rank != rank) { 1281 ghostPoints[gp] = leaves ? leaves[p] : p; 1282 remotePoints[gp].rank = lowners[p].rank; 1283 remotePoints[gp].index = lowners[p].index; 1284 ++gp; 1285 } 1286 } 1287 ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr); 1288 ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr); 1289 ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr); 1290 } 1291 { 1292 PetscBool useCone, useClosure, useAnchors; 1293 1294 ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr); 1295 ierr = DMSetBasicAdjacency(dmParallel, useCone, useClosure);CHKERRQ(ierr); 1296 ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr); 1297 ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr); 1298 } 1299 ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr); 1300 PetscFunctionReturn(0); 1301 } 1302 1303 /*@ 1304 DMPlexSetPartitionBalance - Should distribution of the DM attempt to balance the shared point partition? 1305 1306 Input Parameters: 1307 + dm - The DMPlex object 1308 - flg - Balance the partition? 1309 1310 Level: intermediate 1311 1312 .seealso: DMPlexDistribute(), DMPlexGetPartitionBalance() 1313 @*/ 1314 PetscErrorCode DMPlexSetPartitionBalance(DM dm, PetscBool flg) 1315 { 1316 DM_Plex *mesh = (DM_Plex *)dm->data; 1317 1318 PetscFunctionBegin; 1319 mesh->partitionBalance = flg; 1320 PetscFunctionReturn(0); 1321 } 1322 1323 /*@ 1324 DMPlexGetPartitionBalance - Does distribution of the DM attempt to balance the shared point partition? 1325 1326 Input Parameter: 1327 . dm - The DMPlex object 1328 1329 Output Parameter: 1330 . flg - Balance the partition? 
1331 1332 Level: intermediate 1333 1334 .seealso: DMPlexDistribute(), DMPlexSetPartitionBalance() 1335 @*/ 1336 PetscErrorCode DMPlexGetPartitionBalance(DM dm, PetscBool *flg) 1337 { 1338 DM_Plex *mesh = (DM_Plex *)dm->data; 1339 1340 PetscFunctionBegin; 1341 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1342 PetscValidBoolPointer(flg, 2); 1343 *flg = mesh->partitionBalance; 1344 PetscFunctionReturn(0); 1345 } 1346 1347 typedef struct { 1348 PetscInt vote, rank, index; 1349 } Petsc3Int; 1350 1351 /* MaxLoc, but carry a third piece of information around */ 1352 static void MaxLocCarry(void *in_, void *inout_, PetscMPIInt *len_, MPI_Datatype *dtype) 1353 { 1354 Petsc3Int *a = (Petsc3Int *)inout_; 1355 Petsc3Int *b = (Petsc3Int *)in_; 1356 PetscInt i, len = *len_; 1357 for (i = 0; i < len; i++) { 1358 if (a[i].vote < b[i].vote) { 1359 a[i].vote = b[i].vote; 1360 a[i].rank = b[i].rank; 1361 a[i].index = b[i].index; 1362 } else if (a[i].vote <= b[i].vote) { 1363 if (a[i].rank >= b[i].rank) { 1364 a[i].rank = b[i].rank; 1365 a[i].index = b[i].index; 1366 } 1367 } 1368 } 1369 } 1370 1371 /*@C 1372 DMPlexCreatePointSF - Build a point SF from an SF describing a point migration 1373 1374 Input Parameters: 1375 + dm - The source DMPlex object 1376 . migrationSF - The star forest that describes the parallel point remapping 1377 - ownership - Flag causing a vote to determine point ownership 1378 1379 Output Parameter: 1380 . pointSF - The star forest describing the point overlap in the remapped DM 1381 1382 Level: developer 1383 1384 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap() 1385 @*/ 1386 PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF) 1387 { 1388 PetscMPIInt rank, size; 1389 PetscInt p, nroots, nleaves, idx, npointLeaves; 1390 PetscInt *pointLocal; 1391 const PetscInt *leaves; 1392 const PetscSFNode *roots; 1393 PetscSFNode *rootNodes, *leafNodes, *pointRemote; 1394 Vec shifts; 1395 const PetscInt numShifts = 13759; 1396 const PetscScalar *shift = NULL; 1397 const PetscBool shiftDebug = PETSC_FALSE; 1398 PetscBool balance; 1399 PetscErrorCode ierr; 1400 1401 PetscFunctionBegin; 1402 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1403 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr); 1404 ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr); 1405 ierr = PetscLogEventBegin(DMPLEX_CreatePointSF,dm,0,0,0);CHKERRQ(ierr); 1406 1407 ierr = DMPlexGetPartitionBalance(dm, &balance);CHKERRQ(ierr); 1408 ierr = PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);CHKERRQ(ierr); 1409 ierr = PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes);CHKERRQ(ierr); 1410 if (ownership) { 1411 MPI_Op op; 1412 MPI_Datatype datatype; 1413 Petsc3Int *rootVote = NULL, *leafVote = NULL; 1414 /* If balancing, we compute a random cyclic shift of the rank for each remote point. That way, the max will evenly distribute among ranks.
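       Each leaf then votes with (rank + shift[remote root index % numShifts]) % size, and the MAXLOC-style
       reduction below (MaxLocCarry) keeps the largest vote, breaking ties in favour of the smaller rank, so
       the winning (rank, leaf index) pair recorded at the root identifies the owner of each shared point.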
*/ 1415 if (balance) { 1416 PetscRandom r; 1417 1418 ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr); 1419 ierr = PetscRandomSetInterval(r, 0, 2467*size);CHKERRQ(ierr); 1420 ierr = VecCreate(PETSC_COMM_SELF, &shifts);CHKERRQ(ierr); 1421 ierr = VecSetSizes(shifts, numShifts, numShifts);CHKERRQ(ierr); 1422 ierr = VecSetType(shifts, VECSTANDARD);CHKERRQ(ierr); 1423 ierr = VecSetRandom(shifts, r);CHKERRQ(ierr); 1424 ierr = PetscRandomDestroy(&r);CHKERRQ(ierr); 1425 ierr = VecGetArrayRead(shifts, &shift);CHKERRQ(ierr); 1426 } 1427 1428 ierr = PetscMalloc1(nroots, &rootVote);CHKERRQ(ierr); 1429 ierr = PetscMalloc1(nleaves, &leafVote);CHKERRQ(ierr); 1430 /* Point ownership vote: Process with highest rank owns shared points */ 1431 for (p = 0; p < nleaves; ++p) { 1432 if (shiftDebug) { 1433 ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d] Point %D RemotePoint %D Shift %D MyRank %D\n", rank, leaves ? leaves[p] : p, roots[p].index, (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]), (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size);CHKERRQ(ierr); 1434 } 1435 /* Either put in a bid or we know we own it */ 1436 leafVote[p].vote = (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size; 1437 leafVote[p].rank = rank; 1438 leafVote[p].index = p; 1439 } 1440 for (p = 0; p < nroots; p++) { 1441 /* Root must not participate in the reduction, flag so that MAXLOC does not use */ 1442 rootVote[p].vote = -3; 1443 rootVote[p].rank = -3; 1444 rootVote[p].index = -3; 1445 } 1446 ierr = MPI_Type_contiguous(3, MPIU_INT, &datatype);CHKERRQ(ierr); 1447 ierr = MPI_Type_commit(&datatype);CHKERRQ(ierr); 1448 ierr = MPI_Op_create(&MaxLocCarry, 1, &op);CHKERRQ(ierr); 1449 ierr = PetscSFReduceBegin(migrationSF, datatype, leafVote, rootVote, op);CHKERRQ(ierr); 1450 ierr = PetscSFReduceEnd(migrationSF, datatype, leafVote, rootVote, op);CHKERRQ(ierr); 1451 ierr = MPI_Op_free(&op);CHKERRQ(ierr); 1452 ierr = MPI_Type_free(&datatype);CHKERRQ(ierr); 1453 for (p = 0; p < nroots; p++) { 1454 rootNodes[p].rank = rootVote[p].rank; 1455 rootNodes[p].index = rootVote[p].index; 1456 } 1457 ierr = PetscFree(leafVote);CHKERRQ(ierr); 1458 ierr = PetscFree(rootVote);CHKERRQ(ierr); 1459 } else { 1460 for (p = 0; p < nroots; p++) { 1461 rootNodes[p].index = -1; 1462 rootNodes[p].rank = rank; 1463 } 1464 for (p = 0; p < nleaves; p++) { 1465 /* Write new local id into old location */ 1466 if (roots[p].rank == rank) { 1467 rootNodes[roots[p].index].index = leaves ? 
leaves[p] : p; 1468 } 1469 } 1470 } 1471 ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr); 1472 ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr); 1473 1474 for (npointLeaves = 0, p = 0; p < nleaves; p++) { 1475 if (leafNodes[p].rank != rank) npointLeaves++; 1476 } 1477 ierr = PetscMalloc1(npointLeaves, &pointLocal);CHKERRQ(ierr); 1478 ierr = PetscMalloc1(npointLeaves, &pointRemote);CHKERRQ(ierr); 1479 for (idx = 0, p = 0; p < nleaves; p++) { 1480 if (leafNodes[p].rank != rank) { 1481 pointLocal[idx] = p; 1482 pointRemote[idx] = leafNodes[p]; 1483 idx++; 1484 } 1485 } 1486 if (shift) { 1487 ierr = VecRestoreArrayRead(shifts, &shift);CHKERRQ(ierr); 1488 ierr = VecDestroy(&shifts);CHKERRQ(ierr); 1489 } 1490 if (shiftDebug) {ierr = PetscSynchronizedFlush(PetscObjectComm((PetscObject) dm), PETSC_STDOUT);CHKERRQ(ierr);} 1491 ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);CHKERRQ(ierr); 1492 ierr = PetscSFSetFromOptions(*pointSF);CHKERRQ(ierr); 1493 ierr = PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);CHKERRQ(ierr); 1494 ierr = PetscFree2(rootNodes, leafNodes);CHKERRQ(ierr); 1495 ierr = PetscLogEventEnd(DMPLEX_CreatePointSF,dm,0,0,0);CHKERRQ(ierr); 1496 PetscFunctionReturn(0); 1497 } 1498 1499 /*@C 1500 DMPlexMigrate - Migrates internal DM data over the supplied star forest 1501 1502 Collective on dm 1503 1504 Input Parameters: 1505 + dm - The source DMPlex object 1506 - sf - The star forest communication context describing the migration pattern 1507 1508 Output Parameter: 1509 . targetDM - The target DMPlex object 1510 1511 Level: intermediate 1512 1513 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap() 1514 @*/ 1515 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM) 1516 { 1517 MPI_Comm comm; 1518 PetscInt dim, cdim, nroots; 1519 PetscSF sfPoint; 1520 ISLocalToGlobalMapping ltogMigration; 1521 ISLocalToGlobalMapping ltogOriginal = NULL; 1522 PetscBool flg; 1523 PetscErrorCode ierr; 1524 1525 PetscFunctionBegin; 1526 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1527 ierr = PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr); 1528 ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr); 1529 ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr); 1530 ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr); 1531 ierr = DMGetCoordinateDim(dm, &cdim);CHKERRQ(ierr); 1532 ierr = DMSetCoordinateDim(targetDM, cdim);CHKERRQ(ierr); 1533 1534 /* Check for a one-to-all distribution pattern */ 1535 ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr); 1536 ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr); 1537 if (nroots >= 0) { 1538 IS isOriginal; 1539 PetscInt n, size, nleaves; 1540 PetscInt *numbering_orig, *numbering_new; 1541 1542 /* Get the original point numbering */ 1543 ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr); 1544 ierr = ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal);CHKERRQ(ierr); 1545 ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr); 1546 ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr); 1547 /* Convert to positive global numbers */ 1548 for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);} 1549 /* Derive the new local-to-global mapping from the old one */ 1550 ierr = PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr); 1551 ierr =
PetscMalloc1(nleaves, &numbering_new);CHKERRQ(ierr); 1552 ierr = PetscSFBcastBegin(sf, MPIU_INT, numbering_orig, numbering_new);CHKERRQ(ierr); 1553 ierr = PetscSFBcastEnd(sf, MPIU_INT, numbering_orig, numbering_new);CHKERRQ(ierr); 1554 ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, numbering_new, PETSC_OWN_POINTER, &ltogMigration);CHKERRQ(ierr); 1555 ierr = ISLocalToGlobalMappingRestoreBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr); 1556 ierr = ISDestroy(&isOriginal);CHKERRQ(ierr); 1557 } else { 1558 /* One-to-all distribution pattern: We can derive LToG from SF */ 1559 ierr = ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration);CHKERRQ(ierr); 1560 } 1561 ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr); 1562 if (flg) { 1563 ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr); 1564 ierr = ISLocalToGlobalMappingView(ltogMigration, NULL);CHKERRQ(ierr); 1565 } 1566 /* Migrate DM data to target DM */ 1567 ierr = DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr); 1568 ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr); 1569 ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr); 1570 ierr = DMPlexDistributeSetupHybrid(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr); 1571 ierr = DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr); 1572 ierr = ISLocalToGlobalMappingDestroy(&ltogOriginal);CHKERRQ(ierr); 1573 ierr = ISLocalToGlobalMappingDestroy(&ltogMigration);CHKERRQ(ierr); 1574 ierr = PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr); 1575 PetscFunctionReturn(0); 1576 } 1577 1578 /*@C 1579 DMPlexDistribute - Distributes the mesh and any associated sections. 1580 1581 Collective on dm 1582 1583 Input Parameters: 1584 + dm - The original DMPlex object 1585 - overlap - The overlap of partitions, 0 is the default 1586 1587 Output Parameters: 1588 + sf - The PetscSF used for point distribution, or NULL if not needed 1589 - dmParallel - The distributed DMPlex object 1590 1591 Notes: If the mesh was not distributed, the output dmParallel will be NULL. 1592 1593 The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function 1594 representation on the mesh.
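   A typical call sequence replaces the serial mesh with the distributed one; the sketch below uses an illustrative variable name (dmDist) and the usual PETSc error checking:
.vb
  DM dmDist = NULL;

  ierr = DMPlexDistribute(dm, 0, NULL, &dmDist);CHKERRQ(ierr);
  if (dmDist) {
    ierr = DMDestroy(&dm);CHKERRQ(ierr);
    dm   = dmDist;
  }
.ve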
1595 1596 Level: intermediate 1597 1598 .seealso: DMPlexCreate(), DMSetAdjacency(), DMPlexGetOverlap() 1599 @*/ 1600 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel) 1601 { 1602 MPI_Comm comm; 1603 PetscPartitioner partitioner; 1604 IS cellPart; 1605 PetscSection cellPartSection; 1606 DM dmCoord; 1607 DMLabel lblPartition, lblMigration; 1608 PetscSF sfMigration, sfStratified, sfPoint; 1609 PetscBool flg, balance; 1610 PetscMPIInt rank, size; 1611 PetscErrorCode ierr; 1612 1613 PetscFunctionBegin; 1614 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1615 PetscValidLogicalCollectiveInt(dm, overlap, 2); 1616 if (sf) PetscValidPointer(sf,3); 1617 PetscValidPointer(dmParallel,4); 1618 1619 if (sf) *sf = NULL; 1620 *dmParallel = NULL; 1621 ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr); 1622 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 1623 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 1624 if (size == 1) PetscFunctionReturn(0); 1625 1626 ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr); 1627 /* Create cell partition */ 1628 ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr); 1629 ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr); 1630 ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr); 1631 ierr = PetscPartitionerPartition(partitioner, dm, cellPartSection, &cellPart);CHKERRQ(ierr); 1632 ierr = PetscLogEventBegin(DMPLEX_PartSelf,dm,0,0,0);CHKERRQ(ierr); 1633 { 1634 /* Convert partition to DMLabel */ 1635 IS is; 1636 PetscHSetI ht; 1637 const PetscInt *points; 1638 PetscInt *iranks; 1639 PetscInt pStart, pEnd, proc, npoints, poff = 0, nranks; 1640 1641 ierr = DMLabelCreate(PETSC_COMM_SELF, "Point Partition", &lblPartition);CHKERRQ(ierr); 1642 /* Preallocate strata */ 1643 ierr = PetscHSetICreate(&ht);CHKERRQ(ierr); 1644 ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr); 1645 for (proc = pStart; proc < pEnd; proc++) { 1646 ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr); 1647 if (npoints) {ierr = PetscHSetIAdd(ht, proc);CHKERRQ(ierr);} 1648 } 1649 ierr = PetscHSetIGetSize(ht, &nranks);CHKERRQ(ierr); 1650 ierr = PetscMalloc1(nranks, &iranks);CHKERRQ(ierr); 1651 ierr = PetscHSetIGetElems(ht, &poff, iranks);CHKERRQ(ierr); 1652 ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr); 1653 ierr = DMLabelAddStrata(lblPartition, nranks, iranks);CHKERRQ(ierr); 1654 ierr = PetscFree(iranks);CHKERRQ(ierr); 1655 /* Inline DMPlexPartitionLabelClosure() */ 1656 ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr); 1657 ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr); 1658 for (proc = pStart; proc < pEnd; proc++) { 1659 ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr); 1660 if (!npoints) continue; 1661 ierr = PetscSectionGetOffset(cellPartSection, proc, &poff);CHKERRQ(ierr); 1662 ierr = DMPlexClosurePoints_Private(dm, npoints, points+poff, &is);CHKERRQ(ierr); 1663 ierr = DMLabelSetStratumIS(lblPartition, proc, is);CHKERRQ(ierr); 1664 ierr = ISDestroy(&is);CHKERRQ(ierr); 1665 } 1666 ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr); 1667 } 1668 ierr = PetscLogEventEnd(DMPLEX_PartSelf,dm,0,0,0);CHKERRQ(ierr); 1669 1670 ierr = DMLabelCreate(PETSC_COMM_SELF, "Point migration", &lblMigration);CHKERRQ(ierr); 1671 ierr = DMPlexPartitionLabelInvert(dm, lblPartition, NULL, lblMigration);CHKERRQ(ierr); 1672 ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr); 1673 ierr = 
DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr); 1674 ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr); 1675 sfMigration = sfStratified; 1676 ierr = PetscSFSetUp(sfMigration);CHKERRQ(ierr); 1677 ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr); 1678 ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr); 1679 if (flg) { 1680 ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr); 1681 ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr); 1682 } 1683 1684 /* Create non-overlapping parallel DM and migrate internal data */ 1685 ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr); 1686 ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr); 1687 ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr); 1688 1689 /* Build the point SF without overlap */ 1690 ierr = DMPlexGetPartitionBalance(dm, &balance);CHKERRQ(ierr); 1691 ierr = DMPlexSetPartitionBalance(*dmParallel, balance);CHKERRQ(ierr); 1692 ierr = DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr); 1693 ierr = DMSetPointSF(*dmParallel, sfPoint);CHKERRQ(ierr); 1694 ierr = DMGetCoordinateDM(*dmParallel, &dmCoord);CHKERRQ(ierr); 1695 if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);} 1696 if (flg) {ierr = PetscSFView(sfPoint, NULL);CHKERRQ(ierr);} 1697 1698 if (overlap > 0) { 1699 DM dmOverlap; 1700 PetscInt nroots, nleaves, noldleaves, l; 1701 const PetscInt *oldLeaves; 1702 PetscSFNode *newRemote, *permRemote; 1703 const PetscSFNode *oldRemote; 1704 PetscSF sfOverlap, sfOverlapPoint; 1705 1706 /* Add the partition overlap to the distributed DM */ 1707 ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr); 1708 ierr = DMDestroy(dmParallel);CHKERRQ(ierr); 1709 *dmParallel = dmOverlap; 1710 if (flg) { 1711 ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr); 1712 ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr); 1713 } 1714 1715 /* Re-map the migration SF to establish the full migration pattern */ 1716 ierr = PetscSFGetGraph(sfMigration, &nroots, &noldleaves, &oldLeaves, &oldRemote);CHKERRQ(ierr); 1717 ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr); 1718 ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr); 1719 /* oldRemote: original root point mapping to original leaf point 1720 newRemote: original leaf point mapping to overlapped leaf point */ 1721 if (oldLeaves) { 1722 /* After stratification, the migration remotes may not be in root (canonical) order, so we reorder using the leaf numbering */ 1723 ierr = PetscMalloc1(noldleaves, &permRemote);CHKERRQ(ierr); 1724 for (l = 0; l < noldleaves; ++l) permRemote[oldLeaves[l]] = oldRemote[l]; 1725 oldRemote = permRemote; 1726 } 1727 ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr); 1728 ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr); 1729 if (oldLeaves) {ierr = PetscFree(oldRemote);CHKERRQ(ierr);} 1730 ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr); 1731 ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr); 1732 ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr); 1733 ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr); 1734 sfMigration = sfOverlapPoint; 1735 } 1736 /* Cleanup Partition */ 1737 ierr = 
DMLabelDestroy(&lblPartition);CHKERRQ(ierr); 1738 ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr); 1739 ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr); 1740 ierr = ISDestroy(&cellPart);CHKERRQ(ierr); 1741 /* Copy BC */ 1742 ierr = DMCopyBoundary(dm, *dmParallel);CHKERRQ(ierr); 1743 /* Create sfNatural */ 1744 if (dm->useNatural) { 1745 PetscSection section; 1746 1747 ierr = DMGetLocalSection(dm, &section);CHKERRQ(ierr); 1748 ierr = DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural);CHKERRQ(ierr); 1749 ierr = DMSetUseNatural(*dmParallel, PETSC_TRUE);CHKERRQ(ierr); 1750 } 1751 /* Cleanup */ 1752 if (sf) {*sf = sfMigration;} 1753 else {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);} 1754 ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr); 1755 ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr); 1756 PetscFunctionReturn(0); 1757 } 1758 1759 /*@C 1760 DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM. 1761 1762 Collective on dm 1763 1764 Input Parameters: 1765 + dm - The non-overlapping distributed DMPlex object 1766 - overlap - The overlap of partitions (the same on all ranks) 1767 1768 Output Parameters: 1769 + sf - The PetscSF used for point distribution 1770 - dmOverlap - The overlapping distributed DMPlex object, or NULL 1771 1772 Notes: 1773 If the mesh was not distributed, the output dmOverlap will be NULL. 1774 1775 The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function 1776 representation on the mesh. 1777 1778 Level: advanced 1779 1780 .seealso: DMPlexCreate(), DMSetAdjacency(), DMPlexDistribute(), DMPlexCreateOverlapLabel(), DMPlexGetOverlap() 1781 @*/ 1782 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap) 1783 { 1784 MPI_Comm comm; 1785 PetscMPIInt size, rank; 1786 PetscSection rootSection, leafSection; 1787 IS rootrank, leafrank; 1788 DM dmCoord; 1789 DMLabel lblOverlap; 1790 PetscSF sfOverlap, sfStratified, sfPoint; 1791 PetscErrorCode ierr; 1792 1793 PetscFunctionBegin; 1794 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1795 PetscValidLogicalCollectiveInt(dm, overlap, 2); 1796 if (sf) PetscValidPointer(sf, 3); 1797 PetscValidPointer(dmOverlap, 4); 1798 1799 if (sf) *sf = NULL; 1800 *dmOverlap = NULL; 1801 ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr); 1802 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 1803 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 1804 if (size == 1) PetscFunctionReturn(0); 1805 1806 ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr); 1807 /* Compute point overlap with neighbouring processes on the distributed DM */ 1808 ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr); 1809 ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr); 1810 ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr); 1811 ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr); 1812 ierr = DMPlexCreateOverlapLabel(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr); 1813 /* Convert overlap label to stratified migration SF */ 1814 ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr); 1815 ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr); 1816 ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr); 1817 sfOverlap = sfStratified; 1818 ierr =
PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr); 1819 ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr); 1820 1821 ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr); 1822 ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr); 1823 ierr = ISDestroy(&rootrank);CHKERRQ(ierr); 1824 ierr = ISDestroy(&leafrank);CHKERRQ(ierr); 1825 ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr); 1826 1827 /* Build the overlapping DM */ 1828 ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr); 1829 ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr); 1830 ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr); 1831 /* Store the overlap in the new DM */ 1832 ((DM_Plex*)(*dmOverlap)->data)->overlap = overlap + ((DM_Plex*)dm->data)->overlap; 1833 /* Build the new point SF */ 1834 ierr = DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);CHKERRQ(ierr); 1835 ierr = DMSetPointSF(*dmOverlap, sfPoint);CHKERRQ(ierr); 1836 ierr = DMGetCoordinateDM(*dmOverlap, &dmCoord);CHKERRQ(ierr); 1837 if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);} 1838 ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr); 1839 /* Cleanup overlap partition */ 1840 ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr); 1841 if (sf) *sf = sfOverlap; 1842 else {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);} 1843 ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr); 1844 PetscFunctionReturn(0); 1845 } 1846 1847 PetscErrorCode DMPlexGetOverlap_Plex(DM dm, PetscInt *overlap) 1848 { 1849 DM_Plex *mesh = (DM_Plex*) dm->data; 1850 1851 PetscFunctionBegin; 1852 *overlap = mesh->overlap; 1853 PetscFunctionReturn(0); 1854 } 1855 1856 /*@ 1857 DMPlexGetOverlap - Get the DMPlex partition overlap. 1858 1859 Not collective 1860 1861 Input Parameter: 1862 . dm - The DM 1863 1864 Output Parameter: 1865 . overlap - The overlap of this DM 1866 1867 Level: intermediate 1868 1869 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap(), DMPlexCreateOverlapLabel() 1870 @*/ 1871 PetscErrorCode DMPlexGetOverlap(DM dm, PetscInt *overlap) 1872 { 1873 PetscErrorCode ierr; 1874 1875 PetscFunctionBegin; 1876 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1877 ierr = PetscUseMethod(dm,"DMPlexGetOverlap_C",(DM,PetscInt*),(dm,overlap));CHKERRQ(ierr); 1878 PetscFunctionReturn(0); 1879 } 1880 1881 1882 /*@C 1883 DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the 1884 root process of the original's communicator. 1885 1886 Collective on dm 1887 1888 Input Parameter: 1889 .
dm - the original DMPlex object 1890 1891 Output Parameters: 1892 + sf - the PetscSF used for point distribution (optional) 1893 - gatherMesh - the gathered DM object, or NULL 1894 1895 Level: intermediate 1896 1897 .seealso: DMPlexDistribute(), DMPlexGetRedundantDM() 1898 @*/ 1899 PetscErrorCode DMPlexGetGatherDM(DM dm, PetscSF *sf, DM *gatherMesh) 1900 { 1901 MPI_Comm comm; 1902 PetscMPIInt size; 1903 PetscPartitioner oldPart, gatherPart; 1904 PetscErrorCode ierr; 1905 1906 PetscFunctionBegin; 1907 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1908 PetscValidPointer(gatherMesh,3); 1909 *gatherMesh = NULL; 1910 if (sf) *sf = NULL; 1911 comm = PetscObjectComm((PetscObject)dm); 1912 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 1913 if (size == 1) PetscFunctionReturn(0); 1914 ierr = DMPlexGetPartitioner(dm,&oldPart);CHKERRQ(ierr); 1915 ierr = PetscObjectReference((PetscObject)oldPart);CHKERRQ(ierr); 1916 ierr = PetscPartitionerCreate(comm,&gatherPart);CHKERRQ(ierr); 1917 ierr = PetscPartitionerSetType(gatherPart,PETSCPARTITIONERGATHER);CHKERRQ(ierr); 1918 ierr = DMPlexSetPartitioner(dm,gatherPart);CHKERRQ(ierr); 1919 ierr = DMPlexDistribute(dm,0,sf,gatherMesh);CHKERRQ(ierr); 1920 1921 ierr = DMPlexSetPartitioner(dm,oldPart);CHKERRQ(ierr); 1922 ierr = PetscPartitionerDestroy(&gatherPart);CHKERRQ(ierr); 1923 ierr = PetscPartitionerDestroy(&oldPart);CHKERRQ(ierr); 1924 PetscFunctionReturn(0); 1925 } 1926 1927 /*@C 1928 DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process. 1929 1930 Collective on dm 1931 1932 Input Parameter: 1933 . dm - the original DMPlex object 1934 1935 Output Parameters: 1936 + sf - the PetscSF used for point distribution (optional) 1937 - redundantMesh - the redundant DM object, or NULL 1938 1939 Level: intermediate 1940 1941 .seealso: DMPlexDistribute(), DMPlexGetGatherDM() 1942 @*/ 1943 PetscErrorCode DMPlexGetRedundantDM(DM dm, PetscSF *sf, DM *redundantMesh) 1944 { 1945 MPI_Comm comm; 1946 PetscMPIInt size, rank; 1947 PetscInt pStart, pEnd, p; 1948 PetscInt numPoints = -1; 1949 PetscSF migrationSF, sfPoint, gatherSF; 1950 DM gatherDM, dmCoord; 1951 PetscSFNode *points; 1952 PetscErrorCode ierr; 1953 1954 PetscFunctionBegin; 1955 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1956 PetscValidPointer(redundantMesh,3); 1957 *redundantMesh = NULL; 1958 comm = PetscObjectComm((PetscObject)dm); 1959 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 1960 if (size == 1) { 1961 ierr = PetscObjectReference((PetscObject) dm);CHKERRQ(ierr); 1962 *redundantMesh = dm; 1963 if (sf) *sf = NULL; 1964 PetscFunctionReturn(0); 1965 } 1966 ierr = DMPlexGetGatherDM(dm,&gatherSF,&gatherDM);CHKERRQ(ierr); 1967 if (!gatherDM) PetscFunctionReturn(0); 1968 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 1969 ierr = DMPlexGetChart(gatherDM,&pStart,&pEnd);CHKERRQ(ierr); 1970 numPoints = pEnd - pStart; 1971 ierr = MPI_Bcast(&numPoints,1,MPIU_INT,0,comm);CHKERRQ(ierr); 1972 ierr = PetscMalloc1(numPoints,&points);CHKERRQ(ierr); 1973 ierr = PetscSFCreate(comm,&migrationSF);CHKERRQ(ierr); 1974 for (p = 0; p < numPoints; p++) { 1975 points[p].index = p; 1976 points[p].rank = 0; 1977 } 1978 ierr = PetscSFSetGraph(migrationSF,pEnd-pStart,numPoints,NULL,PETSC_OWN_POINTER,points,PETSC_OWN_POINTER);CHKERRQ(ierr); 1979 ierr = DMPlexCreate(comm, redundantMesh);CHKERRQ(ierr); 1980 ierr = PetscObjectSetName((PetscObject) *redundantMesh, "Redundant Mesh");CHKERRQ(ierr); 1981 ierr = DMPlexMigrate(gatherDM, migrationSF, *redundantMesh);CHKERRQ(ierr); 1982 ierr =
DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint);CHKERRQ(ierr); 1983 ierr = DMSetPointSF(*redundantMesh, sfPoint);CHKERRQ(ierr); 1984 ierr = DMGetCoordinateDM(*redundantMesh, &dmCoord);CHKERRQ(ierr); 1985 if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);} 1986 ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr); 1987 if (sf) { 1988 PetscSF tsf; 1989 1990 ierr = PetscSFCompose(gatherSF,migrationSF,&tsf);CHKERRQ(ierr); 1991 ierr = DMPlexStratifyMigrationSF(dm, tsf, sf);CHKERRQ(ierr); 1992 ierr = PetscSFDestroy(&tsf);CHKERRQ(ierr); 1993 } 1994 ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr); 1995 ierr = PetscSFDestroy(&gatherSF);CHKERRQ(ierr); 1996 ierr = DMDestroy(&gatherDM);CHKERRQ(ierr); 1997 PetscFunctionReturn(0); 1998 } 1999
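/* Usage sketch for the gather/redundant helpers above (illustrative only; dm and the usual
   PETSc error handling are assumed):

     DM      dmRedundant = NULL;
     PetscSF sf          = NULL;

     ierr = DMPlexGetRedundantDM(dm, &sf, &dmRedundant);CHKERRQ(ierr);
     if (dmRedundant) {
       ... every rank now holds a complete copy of the mesh ...
       ierr = DMDestroy(&dmRedundant);CHKERRQ(ierr);
     }
     ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
*/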