1 #include <petsc/private/dmpleximpl.h> /*I "petscdmplex.h" I*/ 2 #include <petsc/private/dmlabelimpl.h> /*I "petscdmlabel.h" I*/ 3 4 /*@C 5 DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback 6 7 Input Parameters: 8 + dm - The DM object 9 . user - The user callback, may be NULL (to clear the callback) 10 - ctx - context for callback evaluation, may be NULL 11 12 Level: advanced 13 14 Notes: 15 The caller of DMPlexGetAdjacency may need to arrange that a large enough array is available for the adjacency. 16 17 Any setting here overrides other configuration of DMPlex adjacency determination. 18 19 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexGetAdjacencyUser() 20 @*/ 21 PetscErrorCode DMPlexSetAdjacencyUser(DM dm,PetscErrorCode (*user)(DM,PetscInt,PetscInt*,PetscInt[],void*),void *ctx) 22 { 23 DM_Plex *mesh = (DM_Plex *)dm->data; 24 25 PetscFunctionBegin; 26 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 27 mesh->useradjacency = user; 28 mesh->useradjacencyctx = ctx; 29 PetscFunctionReturn(0); 30 } 31 32 /*@C 33 DMPlexGetAdjacencyUser - get the user-defined adjacency callback 34 35 Input Parameter: 36 . dm - The DM object 37 38 Output Parameters: 39 - user - The user callback 40 - ctx - context for callback evaluation 41 42 Level: advanced 43 44 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexSetAdjacencyUser() 45 @*/ 46 PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM,PetscInt,PetscInt*,PetscInt[],void*), void **ctx) 47 { 48 DM_Plex *mesh = (DM_Plex *)dm->data; 49 50 PetscFunctionBegin; 51 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 52 if (user) *user = mesh->useradjacency; 53 if (ctx) *ctx = mesh->useradjacencyctx; 54 PetscFunctionReturn(0); 55 } 56 57 /*@ 58 DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first 59 60 Input Parameters: 61 + dm - The DM object 62 - useCone - Flag to use the cone first 63 64 Level: intermediate 65 66 Notes: 67 $ FEM: Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE 68 $ FVM: Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE, useClosure = PETSC_FALSE 69 $ FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE, useClosure = PETSC_TRUE 70 71 .seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator() 72 @*/ 73 PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone) 74 { 75 PetscDS prob; 76 PetscBool useClosure; 77 PetscInt Nf; 78 PetscErrorCode ierr; 79 80 PetscFunctionBegin; 81 ierr = DMGetDS(dm, &prob);CHKERRQ(ierr); 82 ierr = PetscDSGetNumFields(prob, &Nf);CHKERRQ(ierr); 83 if (!Nf) { 84 ierr = PetscDSGetAdjacency(prob, PETSC_DEFAULT, NULL, &useClosure);CHKERRQ(ierr); 85 ierr = PetscDSSetAdjacency(prob, PETSC_DEFAULT, useCone, useClosure);CHKERRQ(ierr); 86 } else { 87 ierr = PetscDSGetAdjacency(prob, 0, NULL, &useClosure);CHKERRQ(ierr); 88 ierr = PetscDSSetAdjacency(prob, 0, useCone, useClosure);CHKERRQ(ierr); 89 } 90 PetscFunctionReturn(0); 91 } 92 93 /*@ 94 DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first 95 96 Input Parameter: 97 . dm - The DM object 98 99 Output Parameter: 100 . useCone - Flag to use the cone first 101 102 Level: intermediate 103 104 Notes: 105 $ FEM: Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE 106 $ FVM: Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE, useClosure = PETSC_FALSE 107 $ FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE, useClosure = PETSC_TRUE 108 109 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator() 110 @*/ 111 PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone) 112 { 113 PetscDS prob; 114 PetscInt Nf; 115 PetscErrorCode ierr; 116 117 PetscFunctionBegin; 118 ierr = DMGetDS(dm, &prob);CHKERRQ(ierr); 119 ierr = PetscDSGetNumFields(prob, &Nf);CHKERRQ(ierr); 120 if (!Nf) { 121 ierr = PetscDSGetAdjacency(prob, PETSC_DEFAULT, useCone, NULL);CHKERRQ(ierr); 122 } else { 123 ierr = PetscDSGetAdjacency(prob, 0, useCone, NULL);CHKERRQ(ierr); 124 } 125 PetscFunctionReturn(0); 126 } 127 128 /*@ 129 DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure 130 131 Input Parameters: 132 + dm - The DM object 133 - useClosure - Flag to use the closure 134 135 Level: intermediate 136 137 Notes: 138 $ FEM: Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE 139 $ FVM: Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE, useClosure = PETSC_FALSE 140 $ FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE, useClosure = PETSC_TRUE 141 142 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator() 143 @*/ 144 PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure) 145 { 146 PetscDS prob; 147 PetscBool useCone; 148 PetscInt Nf; 149 PetscErrorCode ierr; 150 151 PetscFunctionBegin; 152 ierr = DMGetDS(dm, &prob);CHKERRQ(ierr); 153 ierr = PetscDSGetNumFields(prob, &Nf);CHKERRQ(ierr); 154 if (!Nf) { 155 ierr = PetscDSGetAdjacency(prob, PETSC_DEFAULT, &useCone, NULL);CHKERRQ(ierr); 156 ierr = PetscDSSetAdjacency(prob, PETSC_DEFAULT, useCone, useClosure);CHKERRQ(ierr); 157 } else { 158 ierr = PetscDSGetAdjacency(prob, 0, &useCone, NULL);CHKERRQ(ierr); 159 ierr = PetscDSSetAdjacency(prob, 0, useCone, useClosure);CHKERRQ(ierr); 160 } 161 PetscFunctionReturn(0); 162 } 163 164 /*@ 165 DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure 166 167 Input Parameter: 168 . dm - The DM object 169 170 Output Parameter: 171 . useClosure - Flag to use the closure 172 173 Level: intermediate 174 175 Notes: 176 $ FEM: Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE 177 $ FVM: Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE, useClosure = PETSC_FALSE 178 $ FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE, useClosure = PETSC_TRUE 179 180 .seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator() 181 @*/ 182 PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure) 183 { 184 PetscDS prob; 185 PetscInt Nf; 186 PetscErrorCode ierr; 187 188 PetscFunctionBegin; 189 ierr = DMGetDS(dm, &prob);CHKERRQ(ierr); 190 ierr = PetscDSGetNumFields(prob, &Nf);CHKERRQ(ierr); 191 if (!Nf) { 192 ierr = PetscDSGetAdjacency(prob, PETSC_DEFAULT, NULL, useClosure);CHKERRQ(ierr); 193 } else { 194 ierr = PetscDSGetAdjacency(prob, 0, NULL, useClosure);CHKERRQ(ierr); 195 } 196 PetscFunctionReturn(0); 197 } 198 199 /*@ 200 DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints. 201 202 Input Parameters: 203 + dm - The DM object 204 - useAnchors - Flag to use the constraints. If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place. 205 206 Level: intermediate 207 208 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors() 209 @*/ 210 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors) 211 { 212 DM_Plex *mesh = (DM_Plex *) dm->data; 213 214 PetscFunctionBegin; 215 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 216 mesh->useAnchors = useAnchors; 217 PetscFunctionReturn(0); 218 } 219 220 /*@ 221 DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints. 222 223 Input Parameter: 224 . dm - The DM object 225 226 Output Parameter: 227 . useAnchors - Flag to use the closure. If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place. 228 229 Level: intermediate 230 231 .seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors() 232 @*/ 233 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors) 234 { 235 DM_Plex *mesh = (DM_Plex *) dm->data; 236 237 PetscFunctionBegin; 238 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 239 PetscValidIntPointer(useAnchors, 2); 240 *useAnchors = mesh->useAnchors; 241 PetscFunctionReturn(0); 242 } 243 244 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[]) 245 { 246 const PetscInt *cone = NULL; 247 PetscInt numAdj = 0, maxAdjSize = *adjSize, coneSize, c; 248 PetscErrorCode ierr; 249 250 PetscFunctionBeginHot; 251 ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr); 252 ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr); 253 for (c = 0; c <= coneSize; ++c) { 254 const PetscInt point = !c ? p : cone[c-1]; 255 const PetscInt *support = NULL; 256 PetscInt supportSize, s, q; 257 258 ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr); 259 ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr); 260 for (s = 0; s < supportSize; ++s) { 261 for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]),0); ++q) { 262 if (support[s] == adj[q]) break; 263 } 264 if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize); 265 } 266 } 267 *adjSize = numAdj; 268 PetscFunctionReturn(0); 269 } 270 271 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[]) 272 { 273 const PetscInt *support = NULL; 274 PetscInt numAdj = 0, maxAdjSize = *adjSize, supportSize, s; 275 PetscErrorCode ierr; 276 277 PetscFunctionBeginHot; 278 ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr); 279 ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr); 280 for (s = 0; s <= supportSize; ++s) { 281 const PetscInt point = !s ? p : support[s-1]; 282 const PetscInt *cone = NULL; 283 PetscInt coneSize, c, q; 284 285 ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr); 286 ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr); 287 for (c = 0; c < coneSize; ++c) { 288 for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]),0); ++q) { 289 if (cone[c] == adj[q]) break; 290 } 291 if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize); 292 } 293 } 294 *adjSize = numAdj; 295 PetscFunctionReturn(0); 296 } 297 298 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[]) 299 { 300 PetscInt *star = NULL; 301 PetscInt numAdj = 0, maxAdjSize = *adjSize, starSize, s; 302 PetscErrorCode ierr; 303 304 PetscFunctionBeginHot; 305 ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr); 306 for (s = 0; s < starSize*2; s += 2) { 307 const PetscInt *closure = NULL; 308 PetscInt closureSize, c, q; 309 310 ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr); 311 for (c = 0; c < closureSize*2; c += 2) { 312 for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]),0); ++q) { 313 if (closure[c] == adj[q]) break; 314 } 315 if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize); 316 } 317 ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr); 318 } 319 ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr); 320 *adjSize = numAdj; 321 PetscFunctionReturn(0); 322 } 323 324 PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[]) 325 { 326 static PetscInt asiz = 0; 327 PetscInt maxAnchors = 1; 328 PetscInt aStart = -1, aEnd = -1; 329 PetscInt maxAdjSize; 330 PetscSection aSec = NULL; 331 IS aIS = NULL; 332 const PetscInt *anchors; 333 DM_Plex *mesh = (DM_Plex *)dm->data; 334 PetscErrorCode ierr; 335 336 PetscFunctionBeginHot; 337 if (useAnchors) { 338 ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr); 339 if (aSec) { 340 ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr); 341 maxAnchors = PetscMax(1,maxAnchors); 342 ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr); 343 ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr); 344 } 345 } 346 if (!*adj) { 347 PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd; 348 349 ierr = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr); 350 ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr); 351 ierr = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr); 352 coneSeries = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1; 353 supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1; 354 asiz = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries); 355 asiz *= maxAnchors; 356 asiz = PetscMin(asiz,pEnd-pStart); 357 ierr = PetscMalloc1(asiz,adj);CHKERRQ(ierr); 358 } 359 if (*adjSize < 0) *adjSize = asiz; 360 maxAdjSize = *adjSize; 361 if (mesh->useradjacency) { 362 ierr = mesh->useradjacency(dm, p, adjSize, *adj, mesh->useradjacencyctx);CHKERRQ(ierr); 363 } else if (useTransitiveClosure) { 364 ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr); 365 } else if (useCone) { 366 ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr); 367 } else { 368 ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr); 369 } 370 if (useAnchors && aSec) { 371 PetscInt origSize = *adjSize; 372 PetscInt numAdj = origSize; 373 PetscInt i = 0, j; 374 PetscInt *orig = *adj; 375 376 while (i < origSize) { 377 PetscInt p = orig[i]; 378 PetscInt aDof = 0; 379 380 if (p >= aStart && p < aEnd) { 381 ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr); 382 } 383 if (aDof) { 384 PetscInt aOff; 385 PetscInt s, q; 386 387 for (j = i + 1; j < numAdj; j++) { 388 orig[j - 1] = orig[j]; 389 } 390 origSize--; 391 numAdj--; 392 ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr); 393 for (s = 0; s < aDof; ++s) { 394 for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff+s]),0); ++q) { 395 if (anchors[aOff+s] == orig[q]) break; 396 } 397 if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize); 398 } 399 } 400 else { 401 i++; 402 } 403 } 404 *adjSize = numAdj; 405 ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr); 406 } 407 PetscFunctionReturn(0); 408 } 409 410 /*@ 411 DMPlexGetAdjacency - Return all points adjacent to the given point 412 413 Input Parameters: 414 + dm - The DM object 415 . p - The point 416 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE 417 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize 418 419 Output Parameters: 420 + adjSize - The number of adjacent points 421 - adj - The adjacent points 422 423 Level: advanced 424 425 Notes: The user must PetscFree the adj array if it was not passed in. 426 427 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator() 428 @*/ 429 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[]) 430 { 431 PetscBool useCone, useClosure, useAnchors; 432 PetscErrorCode ierr; 433 434 PetscFunctionBeginHot; 435 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 436 PetscValidPointer(adjSize,3); 437 PetscValidPointer(adj,4); 438 ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr); 439 ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr); 440 ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr); 441 ierr = DMPlexGetAdjacency_Internal(dm, p, useCone, useClosure, useAnchors, adjSize, adj);CHKERRQ(ierr); 442 PetscFunctionReturn(0); 443 } 444 445 /*@ 446 DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity 447 448 Collective on DM 449 450 Input Parameters: 451 + dm - The DM 452 - sfPoint - The PetscSF which encodes point connectivity 453 454 Output Parameters: 455 + processRanks - A list of process neighbors, or NULL 456 - sfProcess - An SF encoding the two-sided process connectivity, or NULL 457 458 Level: developer 459 460 .seealso: PetscSFCreate(), DMPlexCreateProcessSF() 461 @*/ 462 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess) 463 { 464 const PetscSFNode *remotePoints; 465 PetscInt *localPointsNew; 466 PetscSFNode *remotePointsNew; 467 const PetscInt *nranks; 468 PetscInt *ranksNew; 469 PetscBT neighbors; 470 PetscInt pStart, pEnd, p, numLeaves, l, numNeighbors, n; 471 PetscMPIInt size, proc, rank; 472 PetscErrorCode ierr; 473 474 PetscFunctionBegin; 475 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 476 PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2); 477 if (processRanks) {PetscValidPointer(processRanks, 3);} 478 if (sfProcess) {PetscValidPointer(sfProcess, 4);} 479 ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr); 480 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr); 481 ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr); 482 ierr = PetscBTCreate(size, &neighbors);CHKERRQ(ierr); 483 ierr = PetscBTMemzero(size, neighbors);CHKERRQ(ierr); 484 /* Compute root-to-leaf process connectivity */ 485 ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr); 486 ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr); 487 for (p = pStart; p < pEnd; ++p) { 488 PetscInt ndof, noff, n; 489 490 ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr); 491 ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr); 492 for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);} 493 } 494 ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr); 495 /* Compute leaf-to-neighbor process connectivity */ 496 ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr); 497 ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr); 498 for (p = pStart; p < pEnd; ++p) { 499 PetscInt ndof, noff, n; 500 501 ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr); 502 ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr); 503 for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);} 504 } 505 ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr); 506 /* Compute leaf-to-root process connectivity */ 507 for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);} 508 /* Calculate edges */ 509 PetscBTClear(neighbors, rank); 510 for(proc = 0, numNeighbors = 0; proc < size; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;} 511 ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr); 512 ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr); 513 ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr); 514 for(proc = 0, n = 0; proc < size; ++proc) { 515 if (PetscBTLookup(neighbors, proc)) { 516 ranksNew[n] = proc; 517 localPointsNew[n] = proc; 518 remotePointsNew[n].index = rank; 519 remotePointsNew[n].rank = proc; 520 ++n; 521 } 522 } 523 ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr); 524 if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);} 525 else {ierr = PetscFree(ranksNew);CHKERRQ(ierr);} 526 if (sfProcess) { 527 ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr); 528 ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr); 529 ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr); 530 ierr = PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr); 531 } 532 PetscFunctionReturn(0); 533 } 534 535 /*@ 536 DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF. 537 538 Collective on DM 539 540 Input Parameter: 541 . dm - The DM 542 543 Output Parameters: 544 + rootSection - The number of leaves for a given root point 545 . rootrank - The rank of each edge into the root point 546 . leafSection - The number of processes sharing a given leaf point 547 - leafrank - The rank of each process sharing a leaf point 548 549 Level: developer 550 551 .seealso: DMPlexCreateOverlap() 552 @*/ 553 PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank) 554 { 555 MPI_Comm comm; 556 PetscSF sfPoint; 557 const PetscInt *rootdegree; 558 PetscInt *myrank, *remoterank; 559 PetscInt pStart, pEnd, p, nedges; 560 PetscMPIInt rank; 561 PetscErrorCode ierr; 562 563 PetscFunctionBegin; 564 ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr); 565 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 566 ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr); 567 ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr); 568 /* Compute number of leaves for each root */ 569 ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr); 570 ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr); 571 ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr); 572 ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr); 573 for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);} 574 ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr); 575 /* Gather rank of each leaf to root */ 576 ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr); 577 ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr); 578 ierr = PetscMalloc1(nedges, &remoterank);CHKERRQ(ierr); 579 for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank; 580 ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr); 581 ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr); 582 ierr = PetscFree(myrank);CHKERRQ(ierr); 583 ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr); 584 /* Distribute remote ranks to leaves */ 585 ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr); 586 ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr); 587 PetscFunctionReturn(0); 588 } 589 590 /*@C 591 DMPlexCreateOverlap - Compute owner information for shared points. This basically gets two-sided for an SF. 592 593 Collective on DM 594 595 Input Parameters: 596 + dm - The DM 597 . levels - Number of overlap levels 598 . rootSection - The number of leaves for a given root point 599 . rootrank - The rank of each edge into the root point 600 . leafSection - The number of processes sharing a given leaf point 601 - leafrank - The rank of each process sharing a leaf point 602 603 Output Parameters: 604 + ovLabel - DMLabel containing remote overlap contributions as point/rank pairings 605 606 Level: developer 607 608 .seealso: DMPlexDistributeOwnership(), DMPlexDistribute() 609 @*/ 610 PetscErrorCode DMPlexCreateOverlap(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel) 611 { 612 MPI_Comm comm; 613 DMLabel ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */ 614 PetscSF sfPoint, sfProc; 615 const PetscSFNode *remote; 616 const PetscInt *local; 617 const PetscInt *nrank, *rrank; 618 PetscInt *adj = NULL; 619 PetscInt pStart, pEnd, p, sStart, sEnd, nleaves, l; 620 PetscMPIInt rank, size; 621 PetscBool useCone, useClosure, flg; 622 PetscErrorCode ierr; 623 624 PetscFunctionBegin; 625 ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr); 626 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 627 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 628 ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr); 629 ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr); 630 ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr); 631 ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr); 632 ierr = DMLabelCreate("Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr); 633 /* Handle leaves: shared with the root point */ 634 for (l = 0; l < nleaves; ++l) { 635 PetscInt adjSize = PETSC_DETERMINE, a; 636 637 ierr = DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);CHKERRQ(ierr); 638 for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);} 639 } 640 ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr); 641 ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr); 642 /* Handle roots */ 643 for (p = pStart; p < pEnd; ++p) { 644 PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a; 645 646 if ((p >= sStart) && (p < sEnd)) { 647 /* Some leaves share a root with other leaves on different processes */ 648 ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr); 649 if (neighbors) { 650 ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr); 651 ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr); 652 for (n = 0; n < neighbors; ++n) { 653 const PetscInt remoteRank = nrank[noff+n]; 654 655 if (remoteRank == rank) continue; 656 for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);} 657 } 658 } 659 } 660 /* Roots are shared with leaves */ 661 ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr); 662 if (!neighbors) continue; 663 ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr); 664 ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr); 665 for (n = 0; n < neighbors; ++n) { 666 const PetscInt remoteRank = rrank[noff+n]; 667 668 if (remoteRank == rank) continue; 669 for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);} 670 } 671 } 672 ierr = PetscFree(adj);CHKERRQ(ierr); 673 ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr); 674 ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr); 675 /* Add additional overlap levels */ 676 for (l = 1; l < levels; l++) { 677 /* Propagate point donations over SF to capture remote connections */ 678 ierr = DMPlexPartitionLabelPropagate(dm, ovAdjByRank);CHKERRQ(ierr); 679 /* Add next level of point donations to the label */ 680 ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr); 681 } 682 /* We require the closure in the overlap */ 683 ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr); 684 ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr); 685 if (useCone || !useClosure) { 686 ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr); 687 } 688 ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr); 689 if (flg) { 690 ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 691 } 692 /* Make global process SF and invert sender to receiver label */ 693 { 694 /* Build a global process SF */ 695 PetscSFNode *remoteProc; 696 ierr = PetscMalloc1(size, &remoteProc);CHKERRQ(ierr); 697 for (p = 0; p < size; ++p) { 698 remoteProc[p].rank = p; 699 remoteProc[p].index = rank; 700 } 701 ierr = PetscSFCreate(comm, &sfProc);CHKERRQ(ierr); 702 ierr = PetscObjectSetName((PetscObject) sfProc, "Process SF");CHKERRQ(ierr); 703 ierr = PetscSFSetGraph(sfProc, size, size, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr); 704 } 705 ierr = DMLabelCreate("Overlap label", ovLabel);CHKERRQ(ierr); 706 ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, sfProc, *ovLabel);CHKERRQ(ierr); 707 /* Add owned points, except for shared local points */ 708 for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);} 709 for (l = 0; l < nleaves; ++l) { 710 ierr = DMLabelClearValue(*ovLabel, local[l], rank);CHKERRQ(ierr); 711 ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr); 712 } 713 /* Clean up */ 714 ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr); 715 ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr); 716 PetscFunctionReturn(0); 717 } 718 719 /*@C 720 DMPlexCreateOverlapMigrationSF - Create an SF describing the new mesh distribution to make the overlap described by the input SF 721 722 Collective on DM 723 724 Input Parameters: 725 + dm - The DM 726 - overlapSF - The SF mapping ghost points in overlap to owner points on other processes 727 728 Output Parameters: 729 + migrationSF - An SF that maps original points in old locations to points in new locations 730 731 Level: developer 732 733 .seealso: DMPlexCreateOverlap(), DMPlexDistribute() 734 @*/ 735 PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF) 736 { 737 MPI_Comm comm; 738 PetscMPIInt rank, size; 739 PetscInt d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints; 740 PetscInt *pointDepths, *remoteDepths, *ilocal; 741 PetscInt *depthRecv, *depthShift, *depthIdx; 742 PetscSFNode *iremote; 743 PetscSF pointSF; 744 const PetscInt *sharedLocal; 745 const PetscSFNode *overlapRemote, *sharedRemote; 746 PetscErrorCode ierr; 747 748 PetscFunctionBegin; 749 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 750 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr); 751 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 752 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 753 ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr); 754 755 /* Before building the migration SF we need to know the new stratum offsets */ 756 ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr); 757 ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr); 758 for (d=0; d<dim+1; d++) { 759 ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr); 760 for (p=pStart; p<pEnd; p++) pointDepths[p] = d; 761 } 762 for (p=0; p<nleaves; p++) remoteDepths[p] = -1; 763 ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr); 764 ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr); 765 766 /* Count recevied points in each stratum and compute the internal strata shift */ 767 ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr); 768 for (d=0; d<dim+1; d++) depthRecv[d]=0; 769 for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++; 770 depthShift[dim] = 0; 771 for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim]; 772 for (d=1; d<dim; d++) depthShift[d] += depthRecv[0]; 773 for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1]; 774 for (d=0; d<dim+1; d++) { 775 ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr); 776 depthIdx[d] = pStart + depthShift[d]; 777 } 778 779 /* Form the overlap SF build an SF that describes the full overlap migration SF */ 780 ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr); 781 newLeaves = pEnd - pStart + nleaves; 782 ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr); 783 ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr); 784 /* First map local points to themselves */ 785 for (d=0; d<dim+1; d++) { 786 ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr); 787 for (p=pStart; p<pEnd; p++) { 788 point = p + depthShift[d]; 789 ilocal[point] = point; 790 iremote[point].index = p; 791 iremote[point].rank = rank; 792 depthIdx[d]++; 793 } 794 } 795 796 /* Add in the remote roots for currently shared points */ 797 ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr); 798 ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr); 799 for (d=0; d<dim+1; d++) { 800 ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr); 801 for (p=0; p<numSharedPoints; p++) { 802 if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) { 803 point = sharedLocal[p] + depthShift[d]; 804 iremote[point].index = sharedRemote[p].index; 805 iremote[point].rank = sharedRemote[p].rank; 806 } 807 } 808 } 809 810 /* Now add the incoming overlap points */ 811 for (p=0; p<nleaves; p++) { 812 point = depthIdx[remoteDepths[p]]; 813 ilocal[point] = point; 814 iremote[point].index = overlapRemote[p].index; 815 iremote[point].rank = overlapRemote[p].rank; 816 depthIdx[remoteDepths[p]]++; 817 } 818 ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr); 819 820 ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr); 821 ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr); 822 ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr); 823 ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr); 824 ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr); 825 826 ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr); 827 PetscFunctionReturn(0); 828 } 829 830 /*@ 831 DMPlexStratifyMigrationSF - Rearrange the leaves of a migration sf for stratification. 832 833 Input Parameter: 834 + dm - The DM 835 - sf - A star forest with non-ordered leaves, usually defining a DM point migration 836 837 Output Parameter: 838 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified 839 840 Level: developer 841 842 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap() 843 @*/ 844 PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF) 845 { 846 MPI_Comm comm; 847 PetscMPIInt rank, size; 848 PetscInt d, ldepth, depth, p, pStart, pEnd, nroots, nleaves; 849 PetscInt *pointDepths, *remoteDepths, *ilocal; 850 PetscInt *depthRecv, *depthShift, *depthIdx; 851 PetscInt hybEnd[4]; 852 const PetscSFNode *iremote; 853 PetscErrorCode ierr; 854 855 PetscFunctionBegin; 856 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 857 ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr); 858 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 859 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 860 ierr = DMPlexGetDepth(dm, &ldepth);CHKERRQ(ierr); 861 ierr = MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr); 862 if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", ldepth, depth); 863 864 /* Before building the migration SF we need to know the new stratum offsets */ 865 ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);CHKERRQ(ierr); 866 ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr); 867 ierr = DMPlexGetHybridBounds(dm,&hybEnd[depth],&hybEnd[depth-1],&hybEnd[1],&hybEnd[0]);CHKERRQ(ierr); 868 for (d = 0; d < depth+1; ++d) { 869 ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr); 870 for (p = pStart; p < pEnd; ++p) { 871 if (hybEnd[d] >= 0 && p >= hybEnd[d]) { /* put in a separate value for hybrid points */ 872 pointDepths[p] = 2 * d; 873 } else { 874 pointDepths[p] = 2 * d + 1; 875 } 876 } 877 } 878 for (p = 0; p < nleaves; ++p) remoteDepths[p] = -1; 879 ierr = PetscSFBcastBegin(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr); 880 ierr = PetscSFBcastEnd(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr); 881 /* Count recevied points in each stratum and compute the internal strata shift */ 882 ierr = PetscMalloc3(2*(depth+1), &depthRecv, 2*(depth+1), &depthShift, 2*(depth+1), &depthIdx);CHKERRQ(ierr); 883 for (d = 0; d < 2*(depth+1); ++d) depthRecv[d] = 0; 884 for (p = 0; p < nleaves; ++p) depthRecv[remoteDepths[p]]++; 885 depthShift[2*depth+1] = 0; 886 for (d = 0; d < 2*depth+1; ++d) depthShift[d] = depthRecv[2 * depth + 1]; 887 for (d = 0; d < 2*depth; ++d) depthShift[d] += depthRecv[2 * depth]; 888 depthShift[0] += depthRecv[1]; 889 for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[1]; 890 for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[0]; 891 for (d = 2 * depth-1; d > 2; --d) { 892 PetscInt e; 893 894 for (e = d -1; e > 1; --e) depthShift[e] += depthRecv[d]; 895 } 896 for (d = 0; d < 2*(depth+1); ++d) {depthIdx[d] = 0;} 897 /* Derive a new local permutation based on stratified indices */ 898 ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr); 899 for (p = 0; p < nleaves; ++p) { 900 const PetscInt dep = remoteDepths[p]; 901 902 ilocal[p] = depthShift[dep] + depthIdx[dep]; 903 depthIdx[dep]++; 904 } 905 ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr); 906 ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr); 907 ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr); 908 ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr); 909 ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr); 910 PetscFunctionReturn(0); 911 } 912 913 /*@ 914 DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution 915 916 Collective on DM 917 918 Input Parameters: 919 + dm - The DMPlex object 920 . pointSF - The PetscSF describing the communication pattern 921 . originalSection - The PetscSection for existing data layout 922 - originalVec - The existing data 923 924 Output Parameters: 925 + newSection - The PetscSF describing the new data layout 926 - newVec - The new data 927 928 Level: developer 929 930 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData() 931 @*/ 932 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec) 933 { 934 PetscSF fieldSF; 935 PetscInt *remoteOffsets, fieldSize; 936 PetscScalar *originalValues, *newValues; 937 PetscErrorCode ierr; 938 939 PetscFunctionBegin; 940 ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr); 941 ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr); 942 943 ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr); 944 ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr); 945 ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr); 946 947 ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr); 948 ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr); 949 ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr); 950 ierr = PetscFree(remoteOffsets);CHKERRQ(ierr); 951 ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr); 952 ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr); 953 ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr); 954 ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr); 955 ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr); 956 ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr); 957 PetscFunctionReturn(0); 958 } 959 960 /*@ 961 DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution 962 963 Collective on DM 964 965 Input Parameters: 966 + dm - The DMPlex object 967 . pointSF - The PetscSF describing the communication pattern 968 . originalSection - The PetscSection for existing data layout 969 - originalIS - The existing data 970 971 Output Parameters: 972 + newSection - The PetscSF describing the new data layout 973 - newIS - The new data 974 975 Level: developer 976 977 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData() 978 @*/ 979 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS) 980 { 981 PetscSF fieldSF; 982 PetscInt *newValues, *remoteOffsets, fieldSize; 983 const PetscInt *originalValues; 984 PetscErrorCode ierr; 985 986 PetscFunctionBegin; 987 ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr); 988 ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr); 989 990 ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr); 991 ierr = PetscMalloc1(fieldSize, &newValues);CHKERRQ(ierr); 992 993 ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr); 994 ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr); 995 ierr = PetscFree(remoteOffsets);CHKERRQ(ierr); 996 ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr); 997 ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr); 998 ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr); 999 ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr); 1000 ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr); 1001 ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr); 1002 PetscFunctionReturn(0); 1003 } 1004 1005 /*@ 1006 DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution 1007 1008 Collective on DM 1009 1010 Input Parameters: 1011 + dm - The DMPlex object 1012 . pointSF - The PetscSF describing the communication pattern 1013 . originalSection - The PetscSection for existing data layout 1014 . datatype - The type of data 1015 - originalData - The existing data 1016 1017 Output Parameters: 1018 + newSection - The PetscSection describing the new data layout 1019 - newData - The new data 1020 1021 Level: developer 1022 1023 .seealso: DMPlexDistribute(), DMPlexDistributeField() 1024 @*/ 1025 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData) 1026 { 1027 PetscSF fieldSF; 1028 PetscInt *remoteOffsets, fieldSize; 1029 PetscMPIInt dataSize; 1030 PetscErrorCode ierr; 1031 1032 PetscFunctionBegin; 1033 ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr); 1034 ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr); 1035 1036 ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr); 1037 ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr); 1038 ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr); 1039 1040 ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr); 1041 ierr = PetscFree(remoteOffsets);CHKERRQ(ierr); 1042 ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr); 1043 ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr); 1044 ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr); 1045 ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr); 1046 PetscFunctionReturn(0); 1047 } 1048 1049 static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel) 1050 { 1051 DM_Plex *pmesh = (DM_Plex*) (dmParallel)->data; 1052 MPI_Comm comm; 1053 PetscSF coneSF; 1054 PetscSection originalConeSection, newConeSection; 1055 PetscInt *remoteOffsets, *cones, *globCones, *newCones, newConesSize; 1056 PetscBool flg; 1057 PetscErrorCode ierr; 1058 1059 PetscFunctionBegin; 1060 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1061 PetscValidPointer(dmParallel,4); 1062 ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr); 1063 1064 /* Distribute cone section */ 1065 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr); 1066 ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr); 1067 ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr); 1068 ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr); 1069 ierr = DMSetUp(dmParallel);CHKERRQ(ierr); 1070 { 1071 PetscInt pStart, pEnd, p; 1072 1073 ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr); 1074 for (p = pStart; p < pEnd; ++p) { 1075 PetscInt coneSize; 1076 ierr = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr); 1077 pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize); 1078 } 1079 } 1080 /* Communicate and renumber cones */ 1081 ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr); 1082 ierr = PetscFree(remoteOffsets);CHKERRQ(ierr); 1083 ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr); 1084 if (original) { 1085 PetscInt numCones; 1086 1087 ierr = PetscSectionGetStorageSize(originalConeSection,&numCones);CHKERRQ(ierr); ierr = PetscMalloc1(numCones,&globCones);CHKERRQ(ierr); 1088 ierr = ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones);CHKERRQ(ierr); 1089 } 1090 else { 1091 globCones = cones; 1092 } 1093 ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr); 1094 ierr = PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr); 1095 ierr = PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr); 1096 if (original) { 1097 ierr = PetscFree(globCones);CHKERRQ(ierr); 1098 } 1099 ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr); 1100 ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr); 1101 #if PETSC_USE_DEBUG 1102 { 1103 PetscInt p; 1104 PetscBool valid = PETSC_TRUE; 1105 for (p = 0; p < newConesSize; ++p) { 1106 if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);} 1107 } 1108 if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map"); 1109 } 1110 #endif 1111 ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr); 1112 if (flg) { 1113 ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr); 1114 ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 1115 ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr); 1116 ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 1117 ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr); 1118 } 1119 ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr); 1120 ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr); 1121 ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr); 1122 ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr); 1123 ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr); 1124 ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr); 1125 /* Create supports and stratify DMPlex */ 1126 { 1127 PetscInt pStart, pEnd; 1128 1129 ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr); 1130 ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr); 1131 } 1132 ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr); 1133 ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr); 1134 { 1135 PetscBool useCone, useClosure, useAnchors; 1136 1137 ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr); 1138 ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr); 1139 ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr); 1140 ierr = DMPlexSetAdjacencyUseCone(dmParallel, useCone);CHKERRQ(ierr); 1141 ierr = DMPlexSetAdjacencyUseClosure(dmParallel, useClosure);CHKERRQ(ierr); 1142 ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr); 1143 } 1144 PetscFunctionReturn(0); 1145 } 1146 1147 static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel) 1148 { 1149 MPI_Comm comm; 1150 PetscSection originalCoordSection, newCoordSection; 1151 Vec originalCoordinates, newCoordinates; 1152 PetscInt bs; 1153 PetscBool isper; 1154 const char *name; 1155 const PetscReal *maxCell, *L; 1156 const DMBoundaryType *bd; 1157 PetscErrorCode ierr; 1158 1159 PetscFunctionBegin; 1160 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1161 PetscValidPointer(dmParallel, 3); 1162 1163 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr); 1164 ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr); 1165 ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr); 1166 ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr); 1167 if (originalCoordinates) { 1168 ierr = VecCreate(PETSC_COMM_SELF, &newCoordinates);CHKERRQ(ierr); 1169 ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr); 1170 ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr); 1171 1172 ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr); 1173 ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr); 1174 ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr); 1175 ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr); 1176 ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr); 1177 } 1178 ierr = DMGetPeriodicity(dm, &isper, &maxCell, &L, &bd);CHKERRQ(ierr); 1179 ierr = DMSetPeriodicity(dmParallel, isper, maxCell, L, bd);CHKERRQ(ierr); 1180 PetscFunctionReturn(0); 1181 } 1182 1183 /* Here we are assuming that process 0 always has everything */ 1184 static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel) 1185 { 1186 DM_Plex *mesh = (DM_Plex*) dm->data; 1187 MPI_Comm comm; 1188 DMLabel depthLabel; 1189 PetscMPIInt rank; 1190 PetscInt depth, d, numLabels, numLocalLabels, l; 1191 PetscBool hasLabels = PETSC_FALSE, lsendDepth, sendDepth; 1192 PetscObjectState depthState = -1; 1193 PetscErrorCode ierr; 1194 1195 PetscFunctionBegin; 1196 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1197 PetscValidHeaderSpecific(dm, DM_CLASSID, 3); 1198 ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr); 1199 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr); 1200 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 1201 1202 /* If the user has changed the depth label, communicate it instead */ 1203 ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr); 1204 ierr = DMPlexGetDepthLabel(dm, &depthLabel);CHKERRQ(ierr); 1205 if (depthLabel) {ierr = DMLabelGetState(depthLabel, &depthState);CHKERRQ(ierr);} 1206 lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE; 1207 ierr = MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr); 1208 if (sendDepth) { 1209 ierr = DMRemoveLabel(dmParallel, "depth", &depthLabel);CHKERRQ(ierr); 1210 ierr = DMLabelDestroy(&depthLabel);CHKERRQ(ierr); 1211 } 1212 /* Everyone must have either the same number of labels, or none */ 1213 ierr = DMGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr); 1214 numLabels = numLocalLabels; 1215 ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr); 1216 if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE; 1217 for (l = numLabels-1; l >= 0; --l) { 1218 DMLabel label = NULL, labelNew = NULL; 1219 PetscBool isDepth, lisOutput = PETSC_TRUE, isOutput; 1220 1221 if (hasLabels) {ierr = DMGetLabelByNum(dm, l, &label);CHKERRQ(ierr);} 1222 /* Skip "depth" because it is recreated */ 1223 if (hasLabels) {ierr = PetscStrcmp(label->name, "depth", &isDepth);CHKERRQ(ierr);} 1224 ierr = MPI_Bcast(&isDepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr); 1225 if (isDepth && !sendDepth) continue; 1226 ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr); 1227 if (isDepth) { 1228 /* Put in any missing strata which can occur if users are managing the depth label themselves */ 1229 PetscInt gdepth; 1230 1231 ierr = MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr); 1232 if ((depth >= 0) && (gdepth != depth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", depth, gdepth); 1233 for (d = 0; d <= gdepth; ++d) { 1234 PetscBool has; 1235 1236 ierr = DMLabelHasStratum(labelNew, d, &has);CHKERRQ(ierr); 1237 if (!has) {ierr = DMLabelAddStratum(labelNew, d);CHKERRQ(ierr);} 1238 } 1239 } 1240 ierr = DMAddLabel(dmParallel, labelNew);CHKERRQ(ierr); 1241 /* Put the output flag in the new label */ 1242 if (hasLabels) {ierr = DMGetLabelOutput(dm, label->name, &lisOutput);CHKERRQ(ierr);} 1243 ierr = MPIU_Allreduce(&lisOutput, &isOutput, 1, MPIU_BOOL, MPI_LAND, comm);CHKERRQ(ierr); 1244 ierr = DMSetLabelOutput(dmParallel, labelNew->name, isOutput);CHKERRQ(ierr); 1245 } 1246 ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr); 1247 PetscFunctionReturn(0); 1248 } 1249 1250 static PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel) 1251 { 1252 DM_Plex *mesh = (DM_Plex*) dm->data; 1253 DM_Plex *pmesh = (DM_Plex*) (dmParallel)->data; 1254 PetscBool *isHybrid, *isHybridParallel; 1255 PetscInt dim, depth, d; 1256 PetscInt pStart, pEnd, pStartP, pEndP; 1257 PetscErrorCode ierr; 1258 1259 PetscFunctionBegin; 1260 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1261 PetscValidPointer(dmParallel, 4); 1262 1263 ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr); 1264 ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr); 1265 ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr); 1266 ierr = DMPlexGetChart(dmParallel,&pStartP,&pEndP);CHKERRQ(ierr); 1267 ierr = PetscCalloc2(pEnd-pStart,&isHybrid,pEndP-pStartP,&isHybridParallel);CHKERRQ(ierr); 1268 for (d = 0; d <= depth; d++) { 1269 PetscInt hybridMax = (depth == 1 && d == 1) ? mesh->hybridPointMax[dim] : mesh->hybridPointMax[d]; 1270 1271 if (hybridMax >= 0) { 1272 PetscInt sStart, sEnd, p; 1273 1274 ierr = DMPlexGetDepthStratum(dm,d,&sStart,&sEnd);CHKERRQ(ierr); 1275 for (p = hybridMax; p < sEnd; p++) isHybrid[p-pStart] = PETSC_TRUE; 1276 } 1277 } 1278 ierr = PetscSFBcastBegin(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr); 1279 ierr = PetscSFBcastEnd(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr); 1280 for (d = 0; d <= dim; d++) pmesh->hybridPointMax[d] = -1; 1281 for (d = 0; d <= depth; d++) { 1282 PetscInt sStart, sEnd, p, dd; 1283 1284 ierr = DMPlexGetDepthStratum(dmParallel,d,&sStart,&sEnd);CHKERRQ(ierr); 1285 dd = (depth == 1 && d == 1) ? dim : d; 1286 for (p = sStart; p < sEnd; p++) { 1287 if (isHybridParallel[p-pStartP]) { 1288 pmesh->hybridPointMax[dd] = p; 1289 break; 1290 } 1291 } 1292 } 1293 ierr = PetscFree2(isHybrid,isHybridParallel);CHKERRQ(ierr); 1294 PetscFunctionReturn(0); 1295 } 1296 1297 static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel) 1298 { 1299 DM_Plex *mesh = (DM_Plex*) dm->data; 1300 DM_Plex *pmesh = (DM_Plex*) (dmParallel)->data; 1301 MPI_Comm comm; 1302 DM refTree; 1303 PetscSection origParentSection, newParentSection; 1304 PetscInt *origParents, *origChildIDs; 1305 PetscBool flg; 1306 PetscErrorCode ierr; 1307 1308 PetscFunctionBegin; 1309 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1310 PetscValidHeaderSpecific(dm, DM_CLASSID, 4); 1311 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr); 1312 1313 /* Set up tree */ 1314 ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr); 1315 ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr); 1316 ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr); 1317 if (origParentSection) { 1318 PetscInt pStart, pEnd; 1319 PetscInt *newParents, *newChildIDs, *globParents; 1320 PetscInt *remoteOffsetsParents, newParentSize; 1321 PetscSF parentSF; 1322 1323 ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr); 1324 ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr); 1325 ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr); 1326 ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr); 1327 ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr); 1328 ierr = PetscFree(remoteOffsetsParents);CHKERRQ(ierr); 1329 ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr); 1330 ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr); 1331 if (original) { 1332 PetscInt numParents; 1333 1334 ierr = PetscSectionGetStorageSize(origParentSection,&numParents);CHKERRQ(ierr); 1335 ierr = PetscMalloc1(numParents,&globParents);CHKERRQ(ierr); 1336 ierr = ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents);CHKERRQ(ierr); 1337 } 1338 else { 1339 globParents = origParents; 1340 } 1341 ierr = PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr); 1342 ierr = PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr); 1343 if (original) { 1344 ierr = PetscFree(globParents);CHKERRQ(ierr); 1345 } 1346 ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr); 1347 ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr); 1348 ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr); 1349 #if PETSC_USE_DEBUG 1350 { 1351 PetscInt p; 1352 PetscBool valid = PETSC_TRUE; 1353 for (p = 0; p < newParentSize; ++p) { 1354 if (newParents[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);} 1355 } 1356 if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map"); 1357 } 1358 #endif 1359 ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr); 1360 if (flg) { 1361 ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr); 1362 ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 1363 ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr); 1364 ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 1365 ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr); 1366 } 1367 ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr); 1368 ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr); 1369 ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr); 1370 ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr); 1371 } 1372 pmesh->useAnchors = mesh->useAnchors; 1373 PetscFunctionReturn(0); 1374 } 1375 1376 PETSC_UNUSED static PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel) 1377 { 1378 PetscMPIInt rank, size; 1379 MPI_Comm comm; 1380 PetscErrorCode ierr; 1381 1382 PetscFunctionBegin; 1383 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1384 PetscValidPointer(dmParallel,7); 1385 1386 /* Create point SF for parallel mesh */ 1387 ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr); 1388 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr); 1389 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 1390 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 1391 { 1392 const PetscInt *leaves; 1393 PetscSFNode *remotePoints, *rowners, *lowners; 1394 PetscInt numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints; 1395 PetscInt pStart, pEnd; 1396 1397 ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr); 1398 ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr); 1399 ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr); 1400 for (p=0; p<numRoots; p++) { 1401 rowners[p].rank = -1; 1402 rowners[p].index = -1; 1403 } 1404 ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr); 1405 ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr); 1406 for (p = 0; p < numLeaves; ++p) { 1407 if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */ 1408 lowners[p].rank = rank; 1409 lowners[p].index = leaves ? leaves[p] : p; 1410 } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */ 1411 lowners[p].rank = -2; 1412 lowners[p].index = -2; 1413 } 1414 } 1415 for (p=0; p<numRoots; p++) { /* Root must not participate in the rediction, flag so that MAXLOC does not use */ 1416 rowners[p].rank = -3; 1417 rowners[p].index = -3; 1418 } 1419 ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr); 1420 ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr); 1421 ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr); 1422 ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr); 1423 for (p = 0; p < numLeaves; ++p) { 1424 if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed"); 1425 if (lowners[p].rank != rank) ++numGhostPoints; 1426 } 1427 ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr); 1428 ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr); 1429 for (p = 0, gp = 0; p < numLeaves; ++p) { 1430 if (lowners[p].rank != rank) { 1431 ghostPoints[gp] = leaves ? leaves[p] : p; 1432 remotePoints[gp].rank = lowners[p].rank; 1433 remotePoints[gp].index = lowners[p].index; 1434 ++gp; 1435 } 1436 } 1437 ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr); 1438 ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr); 1439 ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr); 1440 } 1441 { 1442 PetscBool useCone, useClosure, useAnchors; 1443 1444 ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr); 1445 ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr); 1446 ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr); 1447 ierr = DMPlexSetAdjacencyUseCone(dmParallel, useCone);CHKERRQ(ierr); 1448 ierr = DMPlexSetAdjacencyUseClosure(dmParallel, useClosure);CHKERRQ(ierr); 1449 ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr); 1450 } 1451 ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr); 1452 PetscFunctionReturn(0); 1453 } 1454 1455 /*@C 1456 DMPlexDerivePointSF - Build a point SF from an SF describing a point migration 1457 1458 Input Parameter: 1459 + dm - The source DMPlex object 1460 . migrationSF - The star forest that describes the parallel point remapping 1461 . ownership - Flag causing a vote to determine point ownership 1462 1463 Output Parameter: 1464 - pointSF - The star forest describing the point overlap in the remapped DM 1465 1466 Level: developer 1467 1468 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap() 1469 @*/ 1470 PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF) 1471 { 1472 PetscMPIInt rank; 1473 PetscInt p, nroots, nleaves, idx, npointLeaves; 1474 PetscInt *pointLocal; 1475 const PetscInt *leaves; 1476 const PetscSFNode *roots; 1477 PetscSFNode *rootNodes, *leafNodes, *pointRemote; 1478 PetscErrorCode ierr; 1479 1480 PetscFunctionBegin; 1481 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1482 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr); 1483 1484 ierr = PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);CHKERRQ(ierr); 1485 ierr = PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes);CHKERRQ(ierr); 1486 if (ownership) { 1487 /* Point ownership vote: Process with highest rank ownes shared points */ 1488 for (p = 0; p < nleaves; ++p) { 1489 /* Either put in a bid or we know we own it */ 1490 leafNodes[p].rank = rank; 1491 leafNodes[p].index = p; 1492 } 1493 for (p = 0; p < nroots; p++) { 1494 /* Root must not participate in the reduction, flag so that MAXLOC does not use */ 1495 rootNodes[p].rank = -3; 1496 rootNodes[p].index = -3; 1497 } 1498 ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr); 1499 ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr); 1500 } else { 1501 for (p = 0; p < nroots; p++) { 1502 rootNodes[p].index = -1; 1503 rootNodes[p].rank = rank; 1504 }; 1505 for (p = 0; p < nleaves; p++) { 1506 /* Write new local id into old location */ 1507 if (roots[p].rank == rank) { 1508 rootNodes[roots[p].index].index = leaves ? leaves[p] : p; 1509 } 1510 } 1511 } 1512 ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr); 1513 ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr); 1514 1515 for (npointLeaves = 0, p = 0; p < nleaves; p++) {if (leafNodes[p].rank != rank) npointLeaves++;} 1516 ierr = PetscMalloc1(npointLeaves, &pointLocal);CHKERRQ(ierr); 1517 ierr = PetscMalloc1(npointLeaves, &pointRemote);CHKERRQ(ierr); 1518 for (idx = 0, p = 0; p < nleaves; p++) { 1519 if (leafNodes[p].rank != rank) { 1520 pointLocal[idx] = p; 1521 pointRemote[idx] = leafNodes[p]; 1522 idx++; 1523 } 1524 } 1525 ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);CHKERRQ(ierr); 1526 ierr = PetscSFSetFromOptions(*pointSF);CHKERRQ(ierr); 1527 ierr = PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);CHKERRQ(ierr); 1528 ierr = PetscFree2(rootNodes, leafNodes);CHKERRQ(ierr); 1529 PetscFunctionReturn(0); 1530 } 1531 1532 /*@C 1533 DMPlexMigrate - Migrates internal DM data over the supplied star forest 1534 1535 Input Parameter: 1536 + dm - The source DMPlex object 1537 . sf - The star forest communication context describing the migration pattern 1538 1539 Output Parameter: 1540 - targetDM - The target DMPlex object 1541 1542 Level: intermediate 1543 1544 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap() 1545 @*/ 1546 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM) 1547 { 1548 MPI_Comm comm; 1549 PetscInt dim, nroots; 1550 PetscSF sfPoint; 1551 ISLocalToGlobalMapping ltogMigration; 1552 ISLocalToGlobalMapping ltogOriginal = NULL; 1553 PetscBool flg; 1554 PetscErrorCode ierr; 1555 1556 PetscFunctionBegin; 1557 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1558 ierr = PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr); 1559 ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr); 1560 ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr); 1561 ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr); 1562 1563 /* Check for a one-to-all distribution pattern */ 1564 ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr); 1565 ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr); 1566 if (nroots >= 0) { 1567 IS isOriginal; 1568 PetscInt n, size, nleaves; 1569 PetscInt *numbering_orig, *numbering_new; 1570 /* Get the original point numbering */ 1571 ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr); 1572 ierr = ISLocalToGlobalMappingCreateIS(isOriginal, <ogOriginal);CHKERRQ(ierr); 1573 ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr); 1574 ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr); 1575 /* Convert to positive global numbers */ 1576 for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);} 1577 /* Derive the new local-to-global mapping from the old one */ 1578 ierr = PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr); 1579 ierr = PetscMalloc1(nleaves, &numbering_new);CHKERRQ(ierr); 1580 ierr = PetscSFBcastBegin(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr); 1581 ierr = PetscSFBcastEnd(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr); 1582 ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, (const PetscInt*) numbering_new, PETSC_OWN_POINTER, <ogMigration);CHKERRQ(ierr); 1583 ierr = ISLocalToGlobalMappingRestoreIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr); 1584 ierr = ISDestroy(&isOriginal);CHKERRQ(ierr); 1585 } else { 1586 /* One-to-all distribution pattern: We can derive LToG from SF */ 1587 ierr = ISLocalToGlobalMappingCreateSF(sf, 0, <ogMigration);CHKERRQ(ierr); 1588 } 1589 ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr); 1590 if (flg) { 1591 ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr); 1592 ierr = ISLocalToGlobalMappingView(ltogMigration, NULL);CHKERRQ(ierr); 1593 } 1594 /* Migrate DM data to target DM */ 1595 ierr = DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr); 1596 ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr); 1597 ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr); 1598 ierr = DMPlexDistributeSetupHybrid(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr); 1599 ierr = DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr); 1600 ierr = ISLocalToGlobalMappingDestroy(<ogOriginal);CHKERRQ(ierr); 1601 ierr = ISLocalToGlobalMappingDestroy(<ogMigration);CHKERRQ(ierr); 1602 ierr = PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr); 1603 PetscFunctionReturn(0); 1604 } 1605 1606 /*@C 1607 DMPlexDistribute - Distributes the mesh and any associated sections. 1608 1609 Not Collective 1610 1611 Input Parameter: 1612 + dm - The original DMPlex object 1613 - overlap - The overlap of partitions, 0 is the default 1614 1615 Output Parameter: 1616 + sf - The PetscSF used for point distribution 1617 - parallelMesh - The distributed DMPlex object, or NULL 1618 1619 Note: If the mesh was not distributed, the return value is NULL. 1620 1621 The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and 1622 DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function 1623 representation on the mesh. 1624 1625 Level: intermediate 1626 1627 .keywords: mesh, elements 1628 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure() 1629 @*/ 1630 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel) 1631 { 1632 MPI_Comm comm; 1633 PetscPartitioner partitioner; 1634 IS cellPart; 1635 PetscSection cellPartSection; 1636 DM dmCoord; 1637 DMLabel lblPartition, lblMigration; 1638 PetscSF sfProcess, sfMigration, sfStratified, sfPoint; 1639 PetscBool flg; 1640 PetscMPIInt rank, size, p; 1641 PetscErrorCode ierr; 1642 1643 PetscFunctionBegin; 1644 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1645 if (sf) PetscValidPointer(sf,4); 1646 PetscValidPointer(dmParallel,5); 1647 1648 ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr); 1649 ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr); 1650 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 1651 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 1652 1653 if (sf) *sf = NULL; 1654 *dmParallel = NULL; 1655 if (size == 1) { 1656 ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr); 1657 PetscFunctionReturn(0); 1658 } 1659 1660 /* Create cell partition */ 1661 ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr); 1662 ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr); 1663 ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr); 1664 ierr = PetscPartitionerPartition(partitioner, dm, cellPartSection, &cellPart);CHKERRQ(ierr); 1665 { 1666 /* Convert partition to DMLabel */ 1667 PetscInt proc, pStart, pEnd, npoints, poffset; 1668 const PetscInt *points; 1669 ierr = DMLabelCreate("Point Partition", &lblPartition);CHKERRQ(ierr); 1670 ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr); 1671 ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr); 1672 for (proc = pStart; proc < pEnd; proc++) { 1673 ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr); 1674 ierr = PetscSectionGetOffset(cellPartSection, proc, &poffset);CHKERRQ(ierr); 1675 for (p = poffset; p < poffset+npoints; p++) { 1676 ierr = DMLabelSetValue(lblPartition, points[p], proc);CHKERRQ(ierr); 1677 } 1678 } 1679 ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr); 1680 } 1681 ierr = DMPlexPartitionLabelClosure(dm, lblPartition);CHKERRQ(ierr); 1682 { 1683 /* Build a global process SF */ 1684 PetscSFNode *remoteProc; 1685 ierr = PetscMalloc1(size, &remoteProc);CHKERRQ(ierr); 1686 for (p = 0; p < size; ++p) { 1687 remoteProc[p].rank = p; 1688 remoteProc[p].index = rank; 1689 } 1690 ierr = PetscSFCreate(comm, &sfProcess);CHKERRQ(ierr); 1691 ierr = PetscObjectSetName((PetscObject) sfProcess, "Process SF");CHKERRQ(ierr); 1692 ierr = PetscSFSetGraph(sfProcess, size, size, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr); 1693 } 1694 ierr = DMLabelCreate("Point migration", &lblMigration);CHKERRQ(ierr); 1695 ierr = DMPlexPartitionLabelInvert(dm, lblPartition, sfProcess, lblMigration);CHKERRQ(ierr); 1696 ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr); 1697 /* Stratify the SF in case we are migrating an already parallel plex */ 1698 ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr); 1699 ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr); 1700 sfMigration = sfStratified; 1701 ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr); 1702 ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr); 1703 if (flg) { 1704 ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 1705 ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 1706 } 1707 1708 /* Create non-overlapping parallel DM and migrate internal data */ 1709 ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr); 1710 ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr); 1711 ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr); 1712 1713 /* Build the point SF without overlap */ 1714 ierr = DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr); 1715 ierr = DMSetPointSF(*dmParallel, sfPoint);CHKERRQ(ierr); 1716 ierr = DMGetCoordinateDM(*dmParallel, &dmCoord);CHKERRQ(ierr); 1717 if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);} 1718 if (flg) {ierr = PetscSFView(sfPoint, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);} 1719 1720 if (overlap > 0) { 1721 DM dmOverlap; 1722 PetscInt nroots, nleaves; 1723 PetscSFNode *newRemote; 1724 const PetscSFNode *oldRemote; 1725 PetscSF sfOverlap, sfOverlapPoint; 1726 /* Add the partition overlap to the distributed DM */ 1727 ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr); 1728 ierr = DMDestroy(dmParallel);CHKERRQ(ierr); 1729 *dmParallel = dmOverlap; 1730 if (flg) { 1731 ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr); 1732 ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr); 1733 } 1734 1735 /* Re-map the migration SF to establish the full migration pattern */ 1736 ierr = PetscSFGetGraph(sfMigration, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr); 1737 ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr); 1738 ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr); 1739 ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr); 1740 ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr); 1741 ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr); 1742 ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr); 1743 ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr); 1744 ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr); 1745 sfMigration = sfOverlapPoint; 1746 } 1747 /* Cleanup Partition */ 1748 ierr = PetscSFDestroy(&sfProcess);CHKERRQ(ierr); 1749 ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr); 1750 ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr); 1751 ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr); 1752 ierr = ISDestroy(&cellPart);CHKERRQ(ierr); 1753 /* Copy BC */ 1754 ierr = DMCopyBoundary(dm, *dmParallel);CHKERRQ(ierr); 1755 /* Create sfNatural */ 1756 if (dm->useNatural) { 1757 PetscSection section; 1758 1759 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1760 ierr = DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural);CHKERRQ(ierr); 1761 ierr = DMSetUseNatural(*dmParallel, PETSC_TRUE);CHKERRQ(ierr); 1762 } 1763 /* Cleanup */ 1764 if (sf) {*sf = sfMigration;} 1765 else {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);} 1766 ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr); 1767 ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr); 1768 PetscFunctionReturn(0); 1769 } 1770 1771 /*@C 1772 DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM. 1773 1774 Not Collective 1775 1776 Input Parameter: 1777 + dm - The non-overlapping distrbuted DMPlex object 1778 - overlap - The overlap of partitions, 0 is the default 1779 1780 Output Parameter: 1781 + sf - The PetscSF used for point distribution 1782 - dmOverlap - The overlapping distributed DMPlex object, or NULL 1783 1784 Note: If the mesh was not distributed, the return value is NULL. 1785 1786 The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and 1787 DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function 1788 representation on the mesh. 1789 1790 Level: intermediate 1791 1792 .keywords: mesh, elements 1793 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure() 1794 @*/ 1795 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap) 1796 { 1797 MPI_Comm comm; 1798 PetscMPIInt size, rank; 1799 PetscSection rootSection, leafSection; 1800 IS rootrank, leafrank; 1801 DM dmCoord; 1802 DMLabel lblOverlap; 1803 PetscSF sfOverlap, sfStratified, sfPoint; 1804 PetscErrorCode ierr; 1805 1806 PetscFunctionBegin; 1807 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1808 if (sf) PetscValidPointer(sf, 3); 1809 PetscValidPointer(dmOverlap, 4); 1810 1811 ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr); 1812 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 1813 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 1814 if (size == 1) {*dmOverlap = NULL; PetscFunctionReturn(0);} 1815 ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr); 1816 1817 /* Compute point overlap with neighbouring processes on the distributed DM */ 1818 ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr); 1819 ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr); 1820 ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr); 1821 ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr); 1822 ierr = DMPlexCreateOverlap(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr); 1823 /* Convert overlap label to stratified migration SF */ 1824 ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr); 1825 ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr); 1826 ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr); 1827 sfOverlap = sfStratified; 1828 ierr = PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr); 1829 ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr); 1830 1831 ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr); 1832 ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr); 1833 ierr = ISDestroy(&rootrank);CHKERRQ(ierr); 1834 ierr = ISDestroy(&leafrank);CHKERRQ(ierr); 1835 ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr); 1836 1837 /* Build the overlapping DM */ 1838 ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr); 1839 ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr); 1840 ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr); 1841 /* Build the new point SF */ 1842 ierr = DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);CHKERRQ(ierr); 1843 ierr = DMSetPointSF(*dmOverlap, sfPoint);CHKERRQ(ierr); 1844 ierr = DMGetCoordinateDM(*dmOverlap, &dmCoord);CHKERRQ(ierr); 1845 if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);} 1846 ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr); 1847 /* Cleanup overlap partition */ 1848 ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr); 1849 if (sf) *sf = sfOverlap; 1850 else {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);} 1851 ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr); 1852 PetscFunctionReturn(0); 1853 } 1854 1855 /*@C 1856 DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the 1857 root process of the original's communicator. 1858 1859 Input Parameters: 1860 . dm - the original DMPlex object 1861 1862 Output Parameters: 1863 . gatherMesh - the gathered DM object, or NULL 1864 1865 Level: intermediate 1866 1867 .keywords: mesh 1868 .seealso: DMPlexDistribute(), DMPlexGetRedundantDM() 1869 @*/ 1870 PetscErrorCode DMPlexGetGatherDM(DM dm, DM * gatherMesh) 1871 { 1872 MPI_Comm comm; 1873 PetscMPIInt size; 1874 PetscPartitioner oldPart, gatherPart; 1875 PetscErrorCode ierr; 1876 1877 PetscFunctionBegin; 1878 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1879 comm = PetscObjectComm((PetscObject)dm); 1880 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 1881 *gatherMesh = NULL; 1882 if (size == 1) PetscFunctionReturn(0); 1883 ierr = DMPlexGetPartitioner(dm,&oldPart);CHKERRQ(ierr); 1884 ierr = PetscObjectReference((PetscObject)oldPart);CHKERRQ(ierr); 1885 ierr = PetscPartitionerCreate(comm,&gatherPart);CHKERRQ(ierr); 1886 ierr = PetscPartitionerSetType(gatherPart,PETSCPARTITIONERGATHER);CHKERRQ(ierr); 1887 ierr = DMPlexSetPartitioner(dm,gatherPart);CHKERRQ(ierr); 1888 ierr = DMPlexDistribute(dm,0,NULL,gatherMesh);CHKERRQ(ierr); 1889 ierr = DMPlexSetPartitioner(dm,oldPart);CHKERRQ(ierr); 1890 ierr = PetscPartitionerDestroy(&gatherPart);CHKERRQ(ierr); 1891 ierr = PetscPartitionerDestroy(&oldPart);CHKERRQ(ierr); 1892 PetscFunctionReturn(0); 1893 } 1894 1895 /*@C 1896 DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process. 1897 1898 Input Parameters: 1899 . dm - the original DMPlex object 1900 1901 Output Parameters: 1902 . redundantMesh - the redundant DM object, or NULL 1903 1904 Level: intermediate 1905 1906 .keywords: mesh 1907 .seealso: DMPlexDistribute(), DMPlexGetGatherDM() 1908 @*/ 1909 PetscErrorCode DMPlexGetRedundantDM(DM dm, DM * redundantMesh) 1910 { 1911 MPI_Comm comm; 1912 PetscMPIInt size, rank; 1913 PetscInt pStart, pEnd, p; 1914 PetscInt numPoints = -1; 1915 PetscSF migrationSF, sfPoint; 1916 DM gatherDM, dmCoord; 1917 PetscSFNode *points; 1918 PetscErrorCode ierr; 1919 1920 PetscFunctionBegin; 1921 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1922 comm = PetscObjectComm((PetscObject)dm); 1923 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 1924 *redundantMesh = NULL; 1925 if (size == 1) { 1926 ierr = PetscObjectReference((PetscObject) dm);CHKERRQ(ierr); 1927 *redundantMesh = dm; 1928 PetscFunctionReturn(0); 1929 } 1930 ierr = DMPlexGetGatherDM(dm,&gatherDM);CHKERRQ(ierr); 1931 if (!gatherDM) PetscFunctionReturn(0); 1932 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 1933 ierr = DMPlexGetChart(gatherDM,&pStart,&pEnd);CHKERRQ(ierr); 1934 numPoints = pEnd - pStart; 1935 ierr = MPI_Bcast(&numPoints,1,MPIU_INT,0,comm);CHKERRQ(ierr); 1936 ierr = PetscMalloc1(numPoints,&points);CHKERRQ(ierr); 1937 ierr = PetscSFCreate(comm,&migrationSF);CHKERRQ(ierr); 1938 for (p = 0; p < numPoints; p++) { 1939 points[p].index = p; 1940 points[p].rank = 0; 1941 } 1942 ierr = PetscSFSetGraph(migrationSF,pEnd-pStart,numPoints,NULL,PETSC_OWN_POINTER,points,PETSC_OWN_POINTER);CHKERRQ(ierr); 1943 ierr = DMPlexCreate(comm, redundantMesh);CHKERRQ(ierr); 1944 ierr = PetscObjectSetName((PetscObject) *redundantMesh, "Redundant Mesh");CHKERRQ(ierr); 1945 ierr = DMPlexMigrate(gatherDM, migrationSF, *redundantMesh);CHKERRQ(ierr); 1946 ierr = DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint);CHKERRQ(ierr); 1947 ierr = DMSetPointSF(*redundantMesh, sfPoint);CHKERRQ(ierr); 1948 ierr = DMGetCoordinateDM(*redundantMesh, &dmCoord);CHKERRQ(ierr); 1949 if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);} 1950 ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr); 1951 ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr); 1952 ierr = DMDestroy(&gatherDM);CHKERRQ(ierr); 1953 PetscFunctionReturn(0); 1954 } 1955