#include <petsc-private/dmpleximpl.h> /*I "petscdmplex.h" I*/
#include <petscsf.h>

#undef __FUNCT__
#define __FUNCT__ "DMPlexSetAdjacencyUseCone"
/*@
  DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first

  Input Parameters:
+ dm - The DM object
- useCone - Flag to use the cone first

  Level: intermediate

  Notes:
$ FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
$ FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
$ FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE

.seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  /* The flag lives on the DM_Plex implementation struct; it is read back by
     DMPlexGetAdjacency() to choose the adjacency pattern. */
  mesh->useCone = useCone;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexGetAdjacencyUseCone"
/*@
  DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first

  Input Parameter:
. dm - The DM object

  Output Parameter:
. useCone - Flag to use the cone first

  Level: intermediate

  Notes:
$ FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
$ FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
$ FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE

.seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidIntPointer(useCone, 2);
  *useCone = mesh->useCone;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexSetAdjacencyUseClosure"
/*@
  DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure

  Input Parameters:
+ dm - The DM object
- useClosure - Flag to use the closure

  Level: intermediate

  Notes:
$ FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
$ FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
$ FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE

.seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  mesh->useClosure = useClosure;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define
__FUNCT__ "DMPlexGetAdjacencyUseClosure"
/*@
  DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure

  Input Parameter:
. dm - The DM object

  Output Parameter:
. useClosure - Flag to use the closure

  Level: intermediate

  Notes:
$ FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
$ FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
$ FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE

.seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidIntPointer(useClosure, 2);
  *useClosure = mesh->useClosure;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexSetAdjacencyUseAnchors"
/*@
  DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.

  Input Parameters:
+ dm - The DM object
- useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.

  Level: intermediate

.seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
@*/
PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  mesh->useAnchors = useAnchors;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexGetAdjacencyUseAnchors"
/*@
  DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.

  Input Parameter:
. dm - The DM object

  Output Parameter:
. useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.

  Level: intermediate

.seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
@*/
PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidIntPointer(useAnchors, 2);
  *useAnchors = mesh->useAnchors;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexGetAdjacency_Cone_Internal"
/* Adjacency for the FVM pattern: collect support(p + cone(p)) into adj[],
   deduplicated.  adjSize is in/out: on input the capacity of adj[], on output
   the number of adjacent points written. */
static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
{
  const PetscInt *cone = NULL;
  PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
  ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
  /* c == 0 visits p itself; c > 0 visits the cone points of p */
  for (c = 0; c <= coneSize; ++c) {
    const PetscInt point = !c ?
p : cone[c-1];
    const PetscInt *support = NULL;
    PetscInt        supportSize, s, q;

    ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
    ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
    for (s = 0; s < supportSize; ++s) {
      /* Linear-scan dedup: if support[s] is already in adj[] the inner break fires;
         otherwise the comma expression in the loop condition appends it once the
         scan runs off the end (it evaluates to 0 so the loop still exits). */
      for (q = 0; q < numAdj || (adj[numAdj++] = support[s],0); ++q) {
        if (support[s] == adj[q]) break;
      }
      /* NOTE(review): the capacity check runs after the append above, so the
         out-of-bounds write has already happened when this trips -- confirm the
         allocation in DMPlexGetAdjacency_Internal() always over-provisions. */
      if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
    }
  }
  *adjSize = numAdj;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexGetAdjacency_Support_Internal"
/* Mirror of the Cone variant: collect cone(p + support(p)) into adj[],
   deduplicated.  Same in/out convention for adjSize. */
static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
{
  const PetscInt *support = NULL;
  PetscInt        numAdj = 0, maxAdjSize = *adjSize, supportSize, s;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
  ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
  /* s == 0 visits p itself; s > 0 visits the support points of p */
  for (s = 0; s <= supportSize; ++s) {
    const PetscInt point = !s ? p : support[s-1];
    const PetscInt *cone = NULL;
    PetscInt        coneSize, c, q;

    ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
    ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
    for (c = 0; c < coneSize; ++c) {
      /* Same scan-append dedup idiom as the Cone variant */
      for (q = 0; q < numAdj || (adj[numAdj++] = cone[c],0); ++q) {
        if (cone[c] == adj[q]) break;
      }
      /* NOTE(review): check-after-append, see the Cone variant */
      if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
    }
  }
  *adjSize = numAdj;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexGetAdjacency_Transitive_Internal"
/* Adjacency via transitive closures: take the closure (useClosure == PETSC_TRUE,
   i.e. the cone direction of DMPlexGetTransitiveClosure()) or star of p, then
   the opposite traversal of every point found, and union the results. */
static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
{
  PetscInt      *star = NULL;
  PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
  PetscErrorCode ierr;

  PetscFunctionBeginHot;
  ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
  /* Closure output is (point, orientation) pairs, hence the stride-2 loops */
  for (s = 0; s < starSize*2; s += 2) {
    const PetscInt *closure = NULL;
    PetscInt        closureSize, c, q;

    ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
    for (c = 0; c < closureSize*2; c += 2) {
      /* Scan-append dedup, as in the Cone/Support variants */
      for (q = 0; q < numAdj || (adj[numAdj++] = closure[c],0); ++q) {
        if (closure[c] == adj[q]) break;
      }
      /* NOTE(review): check-after-append, see the Cone variant */
      if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
    }
    ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
  }
  ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
  *adjSize = numAdj;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexGetAdjacency_Internal"
PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p,
PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
{
  /* NOTE(review): asiz is static, so the size computed when this routine
     allocates *adj is remembered across calls; a later call with *adj != NULL
     and *adjSize < 0 reuses the last computed value.  Not thread-safe --
     confirm callers always pass matching buffers. */
  static PetscInt asiz = 0;
  PetscInt        maxAnchors = 1;
  PetscInt        aStart = -1, aEnd = -1;
  PetscInt        maxAdjSize;
  PetscSection    aSec = NULL;
  IS              aIS = NULL;
  const PetscInt *anchors;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  if (useAnchors) {
    ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
    if (aSec) {
      ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
      maxAnchors = PetscMax(1,maxAnchors);
      ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
      ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
    }
  }
  if (!*adj) {
    /* Allocate a worst-case adjacency buffer: bound the size by geometric
       series in the maximum cone/support sizes over the DAG depth, scaled by
       the anchor fan-out, and clipped to the chart size. */
    PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;

    ierr = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
    ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
    ierr = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
    coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
    supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
    asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
    asiz *= maxAnchors;
    asiz  = PetscMin(asiz,pEnd-pStart);
    ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
  }
  if (*adjSize < 0) *adjSize = asiz;
  maxAdjSize = *adjSize;
  /* Dispatch on the adjacency pattern (see the Notes in DMPlexSetAdjacencyUseCone()) */
  if (useTransitiveClosure) {
    ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
  } else if (useCone) {
    ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  } else {
    ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  }
  if (useAnchors && aSec) {
    PetscInt  origSize = *adjSize;
    PetscInt  numAdj   = origSize;
    PetscInt  i = 0, j;
    PetscInt *orig = *adj;

    /* Replace every constrained (anchored) point in the adjacency list by its
       anchor points, deduplicated against the remaining entries. */
    while (i < origSize) {
      PetscInt p = orig[i];
      PetscInt aDof = 0;

      if (p >= aStart && p < aEnd) {
        ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
      }
      if (aDof) {
        PetscInt aOff;
        PetscInt s, q;

        /* Remove the constrained point by shifting the tail down one slot */
        for (j = i + 1; j < numAdj; j++) {
          orig[j - 1] = orig[j];
        }
        origSize--;
        numAdj--;
        ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
        /* Append each anchor not already present (same scan-append idiom as above) */
        for (s = 0; s < aDof; ++s) {
          for (q = 0; q < numAdj || (orig[numAdj++] = anchors[aOff+s],0); ++q) {
            if (anchors[aOff+s] == orig[q]) break;
          }
          if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
        }
      }
      else {
        /* Unconstrained point: keep it and move on */
        i++;
      }
    }
    *adjSize = numAdj;
    ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexGetAdjacency"
/*@
  DMPlexGetAdjacency - Return all points adjacent to the given point

  Input Parameters:
+ dm - The DM object
. p - The point
.
adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
- adj - Either NULL so that the array is allocated, or an existing array with size adjSize

  Output Parameters:
+ adjSize - The number of adjacent points
- adj - The adjacent points

  Level: advanced

  Notes: The user must PetscFree the adj array if it was not passed in.

.seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
{
  DM_Plex        *mesh = (DM_Plex *) dm->data;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(adjSize,3);
  PetscValidPointer(adj,4);
  /* Thin public wrapper: the adjacency pattern flags stored on the mesh select
     the implementation inside DMPlexGetAdjacency_Internal() */
  ierr = DMPlexGetAdjacency_Internal(dm, p, mesh->useCone, mesh->useClosure, mesh->useAnchors, adjSize, adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexCreateTwoSidedProcessSF"
/*@
  DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity

  Collective on DM

  Input Parameters:
+ dm - The DM
. sfPoint - The PetscSF which encodes point connectivity
. rootRankSection - Section giving the number of leaf ranks attached to each root point
. rootRanks - The leaf ranks for each root point, indexed by rootRankSection
. leafRankSection - Section giving the number of ranks sharing each leaf point
- leafRanks - The sharing ranks for each leaf point, indexed by leafRankSection

  Output Parameters:
+ processRanks - A list of process neighbors, or NULL
- sfProcess - An SF encoding the two-sided process connectivity, or NULL

  Level: developer

.seealso: PetscSFCreate(), DMPlexCreateProcessSF()
@*/
PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
{
  const PetscSFNode *remotePoints;
  PetscInt          *localPointsNew;
  PetscSFNode       *remotePointsNew;
  const PetscInt    *nranks;
  PetscInt          *ranksNew;
  PetscBT            neighbors;
  PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
  PetscMPIInt        numProcs, proc, rank;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
  if (processRanks) {PetscValidPointer(processRanks, 3);}
  if (sfProcess)    {PetscValidPointer(sfProcess, 4);}
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
  /* Mark neighbor ranks in a bit table so each neighbor is recorded once */
  ierr = PetscBTCreate(numProcs, &neighbors);CHKERRQ(ierr);
  ierr = PetscBTMemzero(numProcs, neighbors);CHKERRQ(ierr);
  /* Compute root-to-leaf process connectivity */
  ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {
    PetscInt ndof, noff, n;

    ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
    /* NOTE(review): PetscBTSet's return is assigned to ierr but never CHKERRQ-checked here */
    for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);}
  }
  ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
  /* Compute leaf-to-neighbor process connectivity */
  ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {
    PetscInt ndof, noff, n;

    ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
    /* NOTE(review): unchecked PetscBTSet, as above */
    for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);}
  }
  ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
  /* Compute leaf-to-root process connectivity */
  for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
  /* Calculate edges: a process is never its own neighbor */
  PetscBTClear(neighbors, rank);
  for (proc = 0, numNeighbors = 0; proc < numProcs; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
  ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
  ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
  ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
  /* The process graph: local "point" proc connects to (rank, proc) on the remote side */
  for (proc = 0, n = 0; proc < numProcs; ++proc) {
    if (PetscBTLookup(neighbors, proc)) {
      ranksNew[n]              = proc;
      localPointsNew[n]        = proc;
      remotePointsNew[n].index = rank;
      remotePointsNew[n].rank  = proc;
      ++n;
    }
  }
  ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
  /* processRanks takes ownership of ranksNew when requested; otherwise free it */
  if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
  else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
  if (sfProcess) {
    ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
    ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
    ierr = PetscSFSetGraph(*sfProcess, numProcs, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeOwnership"
/*@
  DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameters:
+ rootSection - The number of leaves for a given root point
. rootrank - The rank of each edge into the root point
.
leafSection - The number of processes sharing a given leaf point
- leafrank - The rank of each process sharing a leaf point

  Level: developer

.seealso: DMPlexCreateOverlap()
@*/
PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
{
  MPI_Comm        comm;
  PetscSF         sfPoint;
  const PetscInt *rootdegree;
  PetscInt       *myrank, *remoterank;
  PetscInt        pStart, pEnd, p, nedges;
  PetscMPIInt     rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* Compute number of leaves for each root */
  ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  /* Gather rank of each leaf to root */
  ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
  ierr = PetscMalloc1(nedges, &remoterank);CHKERRQ(ierr);
  for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
  ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscFree(myrank);CHKERRQ(ierr);
  /* The IS takes ownership of remoterank (PETSC_OWN_POINTER) */
  ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
  /* Distribute remote ranks to leaves */
  ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
  ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexCreateOverlap"
/*@C
  DMPlexCreateOverlap - Compute the points each neighboring process must contribute to this process's overlap, and encode them in an SF.

  Collective on DM

  Input Parameters:
+ dm - The DM
. rootSection - The number of leaves for a given root point
. rootrank - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank - The rank of each process sharing a leaf point

  Output Parameter:
. overlapSF - An SF whose leaves are the overlap points to be received from remote roots

  Level: developer

.seealso: DMPlexDistributeOwnership()
@*/
PetscErrorCode DMPlexCreateOverlap(DM dm, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, PetscSF *overlapSF)
{
  MPI_Comm           comm;
  DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
  PetscSF            sfPoint, sfProc;
  IS                 valueIS;
  DMLabel            ovLeafLabel;
  const PetscSFNode *remote;
  const PetscInt    *local;
  const PetscInt    *nrank, *rrank, *neighbors;
  PetscSFNode       *ovRootPoints, *ovLeafPoints, *remotePoints;
  PetscSection       ovRootSection, ovLeafSection;
  PetscInt          *adj = NULL;
  PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l, numNeighbors, n, ovSize;
  PetscInt           idx, numRemote;
  PetscMPIInt        rank, numProcs;
  PetscBool          useCone, useClosure, flg;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  ierr = DMLabelCreate("Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
  /* Handle leaves: shared with the root point */
  for (l = 0; l < nleaves; ++l) {
    PetscInt adjSize = PETSC_DETERMINE, a;

    /* adj is reused across iterations; DMPlexGetAdjacency() allocates it on first use */
    ierr = DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
  }
  ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
  ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
  /* Handle roots */
  for (p = pStart; p < pEnd; ++p) {
    PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;

    if ((p >= sStart) && (p < sEnd)) {
      /* Some leaves share a root with other leaves on different processes */
      ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
      if (neighbors) {
        ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
        ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
        for (n = 0; n < neighbors; ++n) {
          const PetscInt remoteRank = nrank[noff+n];

          if (remoteRank == rank) continue;
          for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
        }
      }
    }
    /* Roots are shared with leaves */
    ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
    if (!neighbors) continue;
    ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (n = 0; n < neighbors; ++n) {
      const PetscInt remoteRank = rrank[noff+n];

      if (remoteRank == rank) continue;
      for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
    }
  }
  ierr = PetscFree(adj);CHKERRQ(ierr);
  ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
  ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
  /* We require the closure in the overlap */
  ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
  if (useCone || !useClosure) {
    /* The FVM adjacency patterns do not guarantee closed overlap sets, so add
       the transitive closure of every labeled point, per rank stratum */
    IS              rankIS, pointIS;
    const PetscInt *ranks, *points;
    PetscInt        numRanks, numPoints, r, p;

    ierr = DMLabelGetValueIS(ovAdjByRank, &rankIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
    ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
    for (r = 0; r < numRanks; ++r) {
      const PetscInt rank = ranks[r];

      ierr = DMLabelGetStratumIS(ovAdjByRank, rank, &pointIS);CHKERRQ(ierr);
      ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
      ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
      for (p = 0; p < numPoints; ++p) {
        PetscInt *closure = NULL, closureSize, c;

        ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
        for (c = 0; c < closureSize*2; c += 2) {ierr = DMLabelSetValue(ovAdjByRank, closure[c], rank);CHKERRQ(ierr);}
        ierr = DMPlexRestoreTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      }
      ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
      ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
    }
    ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
    ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  }
  ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscViewerASCIISynchronizedAllow(PETSC_VIEWER_STDOUT_WORLD, PETSC_TRUE);CHKERRQ(ierr);
    ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }
  /* Convert to (point, rank) and use actual owners */
  ierr = PetscSectionCreate(comm, &ovRootSection);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) ovRootSection, "Overlap Root Section");CHKERRQ(ierr);
  ierr = PetscSectionSetChart(ovRootSection, 0, numProcs);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(ovAdjByRank, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(ovAdjByRank, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(ovRootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(ovRootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(ovRootSection, &ovSize);CHKERRQ(ierr);
  ierr = PetscMalloc1(ovSize, &ovRootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(ovRootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(ovAdjByRank, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* If the point is a leaf of the point SF its true owner is the remote
         root; otherwise this process owns it */
      ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);
      if (l >= 0) {ovRootPoints[off+p] = remote[l];}
      else        {ovRootPoints[off+p].index = points[p]; ovRootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
  /* Make process SF */
  ierr = DMPlexCreateTwoSidedProcessSF(dm, sfPoint, rootSection, rootrank, leafSection, leafrank, NULL, &sfProc);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscSFView(sfProc, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }
  /* Communicate overlap */
  ierr = PetscSectionCreate(comm, &ovLeafSection);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) ovLeafSection, "Overlap Leaf Section");CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, sfProc, ovRootSection, MPIU_2INT, ovRootPoints, ovLeafSection, (void**) &ovLeafPoints);CHKERRQ(ierr);
  /* Filter remote contributions (ovLeafPoints) into the overlapSF */
  ierr = DMLabelCreate("ovLeafs", &ovLeafLabel);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(ovLeafSection, &ovSize);CHKERRQ(ierr);
  for (p = 0; p < ovSize; p++) {
    /* Don't import points from yourself */
    if (ovLeafPoints[p].rank == rank) continue;
    ierr = DMLabelSetValue(ovLeafLabel, ovLeafPoints[p].index, ovLeafPoints[p].rank);CHKERRQ(ierr);
  }
  /* Don't import points already in the pointSF */
  for (l = 0; l < nleaves; ++l) {
    ierr = DMLabelClearValue(ovLeafLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
  }
  for (numRemote = 0, n = 0; n < numProcs; ++n) {
    PetscInt numPoints;
    ierr = DMLabelGetStratumSize(ovLeafLabel, n, &numPoints);CHKERRQ(ierr);
    numRemote += numPoints;
  }
  ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
  /* Flatten the label into the (index, rank) remote-point array, ordered by rank */
  for (idx = 0, n = 0; n < numProcs; ++n) {
    IS              remoteRootIS;
    PetscInt        numPoints;
    const PetscInt *remoteRoots;
    ierr = DMLabelGetStratumSize(ovLeafLabel, n, &numPoints);CHKERRQ(ierr);
    if (numPoints <= 0) continue;
    ierr = DMLabelGetStratumIS(ovLeafLabel, n, &remoteRootIS);CHKERRQ(ierr);
    ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    for (p = 0; p < numPoints; p++) {
      remotePoints[idx].index = remoteRoots[p];
      remotePoints[idx].rank  = n;
      idx++;
    }
    ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
  }
  ierr = DMLabelDestroy(&ovLeafLabel);CHKERRQ(ierr);
  ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), overlapSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *overlapSF, "Overlap SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*overlapSF);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*overlapSF, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Overlap SF\n");CHKERRQ(ierr);
    ierr = PetscSFView(*overlapSF, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }
  /* Clean up */
  ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
  ierr = PetscFree(ovRootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&ovRootSection);CHKERRQ(ierr);
  ierr = PetscFree(ovLeafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&ovLeafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexCreateOverlapMigrationSF"
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, numProcs;
  PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscSFNode       *iremote;
  PetscSF            pointSF;
  const PetscInt    *sharedLocal;
  const PetscSFNode *overlapRemote, *sharedRemote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);

/* Before building the migration SF we need to know the new stratum offsets */ 774 ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr); 775 ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr); 776 for (d=0; d<dim+1; d++) { 777 ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr); 778 for (p=pStart; p<pEnd; p++) pointDepths[p] = d; 779 } 780 for (p=0; p<nleaves; p++) remoteDepths[p] = -1; 781 ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr); 782 ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr); 783 784 /* Count recevied points in each stratum and compute the internal strata shift */ 785 ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr); 786 for (d=0; d<dim+1; d++) depthRecv[d]=0; 787 for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++; 788 depthShift[dim] = 0; 789 for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim]; 790 for (d=1; d<dim; d++) depthShift[d] += depthRecv[0]; 791 for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1]; 792 for (d=0; d<dim+1; d++) { 793 ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr); 794 depthIdx[d] = pStart + depthShift[d]; 795 } 796 797 /* Form the overlap SF build an SF that describes the full overlap migration SF */ 798 ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr); 799 newLeaves = pEnd - pStart + nleaves; 800 ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr); 801 ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr); 802 /* First map local points to themselves */ 803 for (d=0; d<dim+1; d++) { 804 ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr); 805 for (p=pStart; p<pEnd; p++) { 806 point = p + depthShift[d]; 807 ilocal[point] = point; 808 iremote[point].index = p; 809 iremote[point].rank = rank; 810 depthIdx[d]++; 811 } 812 } 813 814 /* Add in the remote roots for currently shared points 
*/ 815 ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr); 816 ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr); 817 for (d=0; d<dim+1; d++) { 818 ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr); 819 for (p=0; p<numSharedPoints; p++) { 820 if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) { 821 point = sharedLocal[p] + depthShift[d]; 822 iremote[point].index = sharedRemote[p].index; 823 iremote[point].rank = sharedRemote[p].rank; 824 } 825 } 826 } 827 828 /* Now add the incoming overlap points */ 829 for (p=0; p<nleaves; p++) { 830 point = depthIdx[remoteDepths[p]]; 831 ilocal[point] = point; 832 iremote[point].index = overlapRemote[p].index; 833 iremote[point].rank = overlapRemote[p].rank; 834 depthIdx[remoteDepths[p]]++; 835 } 836 ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr); 837 838 ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr); 839 ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr); 840 ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr); 841 ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr); 842 ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr); 843 844 ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr); 845 PetscFunctionReturn(0); 846 } 847 848 #undef __FUNCT__ 849 #define __FUNCT__ "DMPlexDistributeField" 850 /*@ 851 DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution 852 853 Collective on DM 854 855 Input Parameters: 856 + dm - The DMPlex object 857 . pointSF - The PetscSF describing the communication pattern 858 . 
originalSection - The PetscSection for existing data layout 859 - originalVec - The existing data 860 861 Output Parameters: 862 + newSection - The PetscSF describing the new data layout 863 - newVec - The new data 864 865 Level: developer 866 867 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData() 868 @*/ 869 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec) 870 { 871 PetscSF fieldSF; 872 PetscInt *remoteOffsets, fieldSize; 873 PetscScalar *originalValues, *newValues; 874 PetscErrorCode ierr; 875 876 PetscFunctionBegin; 877 ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr); 878 ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr); 879 880 ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr); 881 ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr); 882 ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr); 883 884 ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr); 885 ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr); 886 ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr); 887 ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr); 888 ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr); 889 ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr); 890 ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr); 891 ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr); 892 ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr); 893 PetscFunctionReturn(0); 894 } 895 896 #undef __FUNCT__ 897 #define __FUNCT__ "DMPlexDistributeFieldIS" 898 /*@ 899 DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution 900 901 Collective on DM 902 903 
Input Parameters: 904 + dm - The DMPlex object 905 . pointSF - The PetscSF describing the communication pattern 906 . originalSection - The PetscSection for existing data layout 907 - originalIS - The existing data 908 909 Output Parameters: 910 + newSection - The PetscSF describing the new data layout 911 - newIS - The new data 912 913 Level: developer 914 915 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData() 916 @*/ 917 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS) 918 { 919 PetscSF fieldSF; 920 PetscInt *newValues, *remoteOffsets, fieldSize; 921 const PetscInt *originalValues; 922 PetscErrorCode ierr; 923 924 PetscFunctionBegin; 925 ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr); 926 ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr); 927 928 ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr); 929 ierr = PetscMalloc(fieldSize * sizeof(PetscInt), &newValues);CHKERRQ(ierr); 930 931 ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr); 932 ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr); 933 ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr); 934 ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr); 935 ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr); 936 ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr); 937 ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr); 938 ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr); 939 PetscFunctionReturn(0); 940 } 941 942 #undef __FUNCT__ 943 #define __FUNCT__ "DMPlexDistributeData" 944 /*@ 945 DMPlexDistributeData - Distribute field data to match a 
given PetscSF, usually the SF from mesh distribution 946 947 Collective on DM 948 949 Input Parameters: 950 + dm - The DMPlex object 951 . pointSF - The PetscSF describing the communication pattern 952 . originalSection - The PetscSection for existing data layout 953 . datatype - The type of data 954 - originalData - The existing data 955 956 Output Parameters: 957 + newSection - The PetscSection describing the new data layout 958 - newData - The new data 959 960 Level: developer 961 962 .seealso: DMPlexDistribute(), DMPlexDistributeField() 963 @*/ 964 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData) 965 { 966 PetscSF fieldSF; 967 PetscInt *remoteOffsets, fieldSize; 968 PetscMPIInt dataSize; 969 PetscErrorCode ierr; 970 971 PetscFunctionBegin; 972 ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr); 973 ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr); 974 975 ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr); 976 ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr); 977 ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr); 978 979 ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr); 980 ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr); 981 ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr); 982 ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr); 983 ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr); 984 PetscFunctionReturn(0); 985 } 986 987 #undef __FUNCT__ 988 #define __FUNCT__ "DMPlexDistributeCones" 989 PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel) 990 { 991 DM_Plex *pmesh = (DM_Plex*) (dmParallel)->data; 992 MPI_Comm comm; 993 PetscSF 
coneSF; 994 PetscSection originalConeSection, newConeSection; 995 PetscInt *remoteOffsets, *cones, *newCones, newConesSize; 996 PetscBool flg; 997 PetscErrorCode ierr; 998 999 PetscFunctionBegin; 1000 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1001 PetscValidPointer(dmParallel,4); 1002 ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr); 1003 1004 /* Distribute cone section */ 1005 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr); 1006 ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr); 1007 ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr); 1008 ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr); 1009 ierr = DMSetUp(dmParallel);CHKERRQ(ierr); 1010 { 1011 PetscInt pStart, pEnd, p; 1012 1013 ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr); 1014 for (p = pStart; p < pEnd; ++p) { 1015 PetscInt coneSize; 1016 ierr = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr); 1017 pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize); 1018 } 1019 } 1020 /* Communicate and renumber cones */ 1021 ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr); 1022 ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr); 1023 ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr); 1024 ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr); 1025 ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr); 1026 ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr); 1027 ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr); 1028 #if PETSC_USE_DEBUG 1029 { 1030 PetscInt p; 1031 PetscBool valid = PETSC_TRUE; 1032 for (p = 0; p < newConesSize; ++p) { 1033 if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, 
"Point %d not in overlap SF\n", p);CHKERRQ(ierr);} 1034 } 1035 if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map"); 1036 } 1037 #endif 1038 ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr); 1039 if (flg) { 1040 ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr); 1041 ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 1042 ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr); 1043 ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 1044 ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr); 1045 } 1046 ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr); 1047 ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr); 1048 ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr); 1049 ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr); 1050 ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr); 1051 ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr); 1052 /* Create supports and stratify sieve */ 1053 { 1054 PetscInt pStart, pEnd; 1055 1056 ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr); 1057 ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr); 1058 } 1059 ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr); 1060 ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr); 1061 PetscFunctionReturn(0); 1062 } 1063 1064 #undef __FUNCT__ 1065 #define __FUNCT__ "DMPlexDistributeCoordinates" 1066 PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel) 1067 { 1068 MPI_Comm comm; 1069 PetscSection originalCoordSection, newCoordSection; 1070 Vec originalCoordinates, newCoordinates; 1071 PetscInt bs; 1072 const char *name; 1073 const PetscReal *maxCell, *L; 1074 PetscErrorCode ierr; 1075 1076 PetscFunctionBegin; 1077 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1078 
PetscValidPointer(dmParallel, 3); 1079 1080 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr); 1081 ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr); 1082 ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr); 1083 ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr); 1084 if (originalCoordinates) { 1085 ierr = VecCreate(comm, &newCoordinates);CHKERRQ(ierr); 1086 ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr); 1087 ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr); 1088 1089 ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr); 1090 ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr); 1091 ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr); 1092 ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr); 1093 ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr); 1094 } 1095 ierr = DMGetPeriodicity(dm, &maxCell, &L);CHKERRQ(ierr); 1096 if (L) {ierr = DMSetPeriodicity(dmParallel, maxCell, L);CHKERRQ(ierr);} 1097 PetscFunctionReturn(0); 1098 } 1099 1100 #undef __FUNCT__ 1101 #define __FUNCT__ "DMPlexDistributeLabels" 1102 /* Here we are assuming that process 0 always has everything */ 1103 PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel) 1104 { 1105 MPI_Comm comm; 1106 PetscMPIInt rank; 1107 PetscInt numLabels, numLocalLabels, l; 1108 PetscBool hasLabels = PETSC_FALSE; 1109 PetscErrorCode ierr; 1110 1111 PetscFunctionBegin; 1112 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1113 PetscValidHeaderSpecific(dm, DM_CLASSID, 3); 1114 ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr); 1115 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr); 1116 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 1117 1118 /* Everyone must have either the same number of labels, or none */ 
1119 ierr = DMPlexGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr); 1120 numLabels = numLocalLabels; 1121 ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr); 1122 if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE; 1123 for (l = numLabels-1; l >= 0; --l) { 1124 DMLabel label = NULL, labelNew = NULL; 1125 PetscBool isdepth; 1126 1127 if (hasLabels) { 1128 ierr = DMPlexGetLabelByNum(dm, l, &label);CHKERRQ(ierr); 1129 /* Skip "depth" because it is recreated */ 1130 ierr = PetscStrcmp(label->name, "depth", &isdepth);CHKERRQ(ierr); 1131 } 1132 ierr = MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr); 1133 if (isdepth) continue; 1134 ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr); 1135 ierr = DMPlexAddLabel(dmParallel, labelNew);CHKERRQ(ierr); 1136 } 1137 ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr); 1138 PetscFunctionReturn(0); 1139 } 1140 1141 #undef __FUNCT__ 1142 #define __FUNCT__ "DMPlexDistributeSetupHybrid" 1143 PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel) 1144 { 1145 DM_Plex *mesh = (DM_Plex*) dm->data; 1146 DM_Plex *pmesh = (DM_Plex*) (dmParallel)->data; 1147 MPI_Comm comm; 1148 const PetscInt *gpoints; 1149 PetscInt dim, depth, n, d; 1150 PetscErrorCode ierr; 1151 1152 PetscFunctionBegin; 1153 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1154 PetscValidPointer(dmParallel, 4); 1155 1156 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr); 1157 ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr); 1158 1159 /* Setup hybrid structure */ 1160 for (d = 0; d <= dim; ++d) {pmesh->hybridPointMax[d] = mesh->hybridPointMax[d];} 1161 ierr = MPI_Bcast(pmesh->hybridPointMax, dim+1, MPIU_INT, 0, comm);CHKERRQ(ierr); 1162 ierr = ISLocalToGlobalMappingGetSize(renumbering, &n);CHKERRQ(ierr); 1163 ierr = ISLocalToGlobalMappingGetIndices(renumbering, &gpoints);CHKERRQ(ierr); 1164 ierr = DMPlexGetDepth(dm, 
&depth);CHKERRQ(ierr); 1165 for (d = 0; d <= dim; ++d) { 1166 PetscInt pmax = pmesh->hybridPointMax[d], newmax = 0, pEnd, stratum[2], p; 1167 1168 if (pmax < 0) continue; 1169 ierr = DMPlexGetDepthStratum(dm, d > depth ? depth : d, &stratum[0], &stratum[1]);CHKERRQ(ierr); 1170 ierr = DMPlexGetDepthStratum(dmParallel, d, NULL, &pEnd);CHKERRQ(ierr); 1171 ierr = MPI_Bcast(stratum, 2, MPIU_INT, 0, comm);CHKERRQ(ierr); 1172 for (p = 0; p < n; ++p) { 1173 const PetscInt point = gpoints[p]; 1174 1175 if ((point >= stratum[0]) && (point < stratum[1]) && (point >= pmax)) ++newmax; 1176 } 1177 if (newmax > 0) pmesh->hybridPointMax[d] = pEnd - newmax; 1178 else pmesh->hybridPointMax[d] = -1; 1179 } 1180 ierr = ISLocalToGlobalMappingRestoreIndices(renumbering, &gpoints);CHKERRQ(ierr); 1181 PetscFunctionReturn(0); 1182 } 1183 1184 #undef __FUNCT__ 1185 #define __FUNCT__ "DMPlexDistributeSetupTree" 1186 PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel) 1187 { 1188 MPI_Comm comm; 1189 DM refTree; 1190 PetscSection origParentSection, newParentSection; 1191 PetscInt *origParents, *origChildIDs; 1192 PetscBool flg; 1193 PetscErrorCode ierr; 1194 1195 PetscFunctionBegin; 1196 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1197 PetscValidHeaderSpecific(dm, DM_CLASSID, 4); 1198 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr); 1199 1200 /* Set up tree */ 1201 ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr); 1202 ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr); 1203 ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr); 1204 if (origParentSection) { 1205 PetscInt pStart, pEnd; 1206 PetscInt *newParents, *newChildIDs; 1207 PetscInt *remoteOffsetsParents, newParentSize; 1208 PetscSF parentSF; 1209 1210 ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr); 1211 ierr = 
PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr); 1212 ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr); 1213 ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr); 1214 ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr); 1215 ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr); 1216 ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr); 1217 ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origParents, newParents);CHKERRQ(ierr); 1218 ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origParents, newParents);CHKERRQ(ierr); 1219 ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr); 1220 ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr); 1221 ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr); 1222 ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr); 1223 if (flg) { 1224 ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr); 1225 ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 1226 ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr); 1227 ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 1228 ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr); 1229 } 1230 ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr); 1231 ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr); 1232 ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr); 1233 ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr); 1234 } 1235 PetscFunctionReturn(0); 1236 } 1237 1238 #undef __FUNCT__ 1239 #define __FUNCT__ "DMPlexDistributeSF" 
/* Build the point SF of the parallel mesh: decide a unique owner for every migrated point
   via an MPI_MAXLOC reduction over the migration SF, then record all non-owned local points
   as ghosts in dmParallel->sf. */
PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, PetscSection partSection, IS part, PetscSection origPartSection, IS origPart, DM dmParallel)
{
  DM_Plex       *mesh  = (DM_Plex*) dm->data;
  DM_Plex       *pmesh = (DM_Plex*) (dmParallel)->data;
  PetscMPIInt    rank, numProcs;
  MPI_Comm       comm;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmParallel,7);

  /* Create point SF for parallel mesh */
  ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  {
    const PetscInt *leaves;
    PetscSFNode    *remotePoints, *rowners, *lowners;
    PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
    PetscInt        pStart, pEnd;

    ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
    ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
    /* -1 marks "unclaimed" */
    for (p=0; p<numRoots; p++) {
      rowners[p].rank  = -1;
      rowners[p].index = -1;
    }
    if (origPart) {
      /* Make sure points in the original partition are not assigned to other procs */
      const PetscInt *origPoints;

      ierr = ISGetIndices(origPart, &origPoints);CHKERRQ(ierr);
      for (p = 0; p < numProcs; ++p) {
        PetscInt dof, off, d;

        ierr = PetscSectionGetDof(origPartSection, p, &dof);CHKERRQ(ierr);
        ierr = PetscSectionGetOffset(origPartSection, p, &off);CHKERRQ(ierr);
        for (d = off; d < off+dof; ++d) {
          rowners[origPoints[d]].rank = p;
        }
      }
      ierr = ISRestoreIndices(origPart, &origPoints);CHKERRQ(ierr);
    }
    ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
        lowners[p].rank  = rank;
        lowners[p].index = leaves ? leaves[p] : p;
      } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
        lowners[p].rank  = -2;
        lowners[p].index = -2;
      }
    }
    for (p=0; p<numRoots; p++) { /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rowners[p].rank  = -3;
      rowners[p].index = -3;
    }
    /* Resolve competing bids: highest (rank,index) pair wins ownership of each root */
    ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    /* Broadcast the winners back so every leaf knows its owner */
    ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
      if (lowners[p].rank != rank) ++numGhostPoints;
    }
    ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
    ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
    for (p = 0, gp = 0; p < numLeaves; ++p) {
      if (lowners[p].rank != rank) {
        ghostPoints[gp] = leaves ? leaves[p] : p;
        remotePoints[gp].rank  = lowners[p].rank;
        remotePoints[gp].index = lowners[p].index;
        ++gp;
      }
    }
    ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
    ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
    ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
  }
  /* Carry the adjacency definition over to the parallel mesh */
  pmesh->useCone    = mesh->useCone;
  pmesh->useClosure = mesh->useClosure;
  ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexDistribute"
/*@C
  DMPlexDistribute - Distributes the mesh and any associated sections.

  Collective on DM

  Input Parameter:
+ dm  - The original DMPlex object
- overlap - The overlap of partitions, 0 is the default

  Output Parameter:
+ sf - The PetscSF used for point distribution
- dmParallel - The distributed DMPlex object, or NULL

  Note: If the mesh was not distributed, the return value is NULL.

  The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
  DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
  representation on the mesh.
1349 1350 Level: intermediate 1351 1352 .keywords: mesh, elements 1353 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure() 1354 @*/ 1355 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel) 1356 { 1357 DM_Plex *mesh = (DM_Plex*) dm->data, *pmesh; 1358 MPI_Comm comm; 1359 PetscInt dim, numRemoteRanks, nroots, nleaves; 1360 DM dmOverlap; 1361 IS cellPart, part; 1362 PetscSection cellPartSection, partSection; 1363 PetscSFNode *remoteRanks, *newRemote; 1364 const PetscSFNode *oldRemote; 1365 PetscSF partSF, pointSF, overlapPointSF, overlapSF; 1366 ISLocalToGlobalMapping renumbering; 1367 PetscBool flg; 1368 PetscMPIInt rank, numProcs, p; 1369 PetscErrorCode ierr; 1370 1371 PetscFunctionBegin; 1372 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1373 if (sf) PetscValidPointer(sf,4); 1374 PetscValidPointer(dmParallel,5); 1375 1376 ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr); 1377 ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr); 1378 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 1379 ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr); 1380 1381 *dmParallel = NULL; 1382 if (numProcs == 1) PetscFunctionReturn(0); 1383 1384 ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr); 1385 /* Create cell partition - We need to rewrite to use IS, use the MatPartition stuff */ 1386 ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr); 1387 if (overlap > 1) SETERRQ(comm, PETSC_ERR_SUP, "Overlap > 1 not yet implemented"); 1388 ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr); 1389 ierr = PetscPartitionerPartition(mesh->partitioner, dm, PETSC_FALSE, cellPartSection, &cellPart, NULL, NULL);CHKERRQ(ierr); 1390 /* Create SF assuming a serial partition for all processes: Could check for IS length here */ 1391 if (!rank) numRemoteRanks = numProcs; 1392 else numRemoteRanks = 0; 1393 ierr = PetscMalloc1(numRemoteRanks, 
&remoteRanks);CHKERRQ(ierr); 1394 for (p = 0; p < numRemoteRanks; ++p) { 1395 remoteRanks[p].rank = p; 1396 remoteRanks[p].index = 0; 1397 } 1398 ierr = PetscSFCreate(comm, &partSF);CHKERRQ(ierr); 1399 ierr = PetscSFSetGraph(partSF, 1, numRemoteRanks, NULL, PETSC_OWN_POINTER, remoteRanks, PETSC_OWN_POINTER);CHKERRQ(ierr); 1400 ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr); 1401 if (flg) { 1402 ierr = PetscPrintf(comm, "Original Cell Partition:\n");CHKERRQ(ierr); 1403 ierr = PetscSectionView(cellPartSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 1404 ierr = ISView(cellPart, NULL);CHKERRQ(ierr); 1405 ierr = PetscSFView(partSF, NULL);CHKERRQ(ierr); 1406 } 1407 /* Close the partition over the mesh */ 1408 ierr = DMPlexCreatePartitionClosure(dm, cellPartSection, cellPart, &partSection, &part);CHKERRQ(ierr); 1409 /* Create new mesh */ 1410 ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr); 1411 ierr = DMSetDimension(*dmParallel, dim);CHKERRQ(ierr); 1412 ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr); 1413 pmesh = (DM_Plex*) (*dmParallel)->data; 1414 pmesh->useAnchors = mesh->useAnchors; 1415 1416 /* Distribute sieve points and the global point numbering (replaces creating remote bases) */ 1417 ierr = PetscSFConvertPartition(partSF, partSection, part, &renumbering, &pointSF);CHKERRQ(ierr); 1418 if (flg) { 1419 ierr = PetscPrintf(comm, "Point Partition:\n");CHKERRQ(ierr); 1420 ierr = PetscSectionView(partSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 1421 ierr = ISView(part, NULL);CHKERRQ(ierr); 1422 ierr = PetscSFView(pointSF, NULL);CHKERRQ(ierr); 1423 ierr = PetscPrintf(comm, "Point Renumbering after partition:\n");CHKERRQ(ierr); 1424 ierr = ISLocalToGlobalMappingView(renumbering, NULL);CHKERRQ(ierr); 1425 } 1426 ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr); 1427 1428 /* Migrate data to a non-overlapping parallel DM */ 1429 ierr = 
DMPlexDistributeCones(dm, pointSF, renumbering, *dmParallel);CHKERRQ(ierr); 1430 ierr = DMPlexDistributeCoordinates(dm, pointSF, *dmParallel);CHKERRQ(ierr); 1431 ierr = DMPlexDistributeLabels(dm, pointSF, *dmParallel);CHKERRQ(ierr); 1432 ierr = DMPlexDistributeSetupHybrid(dm, pointSF, renumbering, *dmParallel);CHKERRQ(ierr); 1433 ierr = DMPlexDistributeSetupTree(dm, pointSF, renumbering, *dmParallel);CHKERRQ(ierr); 1434 1435 /* Build the point SF without overlap */ 1436 ierr = DMPlexDistributeSF(dm, pointSF, partSection, part, NULL, NULL, *dmParallel);CHKERRQ(ierr); 1437 if (flg) { 1438 ierr = PetscPrintf(comm, "Point SF:\n");CHKERRQ(ierr); 1439 ierr = PetscSFView((*dmParallel)->sf, NULL);CHKERRQ(ierr); 1440 } 1441 1442 if (overlap > 0) { 1443 ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap,dm,0,0,0);CHKERRQ(ierr); 1444 /* Add the partition overlap to the distributed DM */ 1445 ierr = DMPlexDistributeOverlap(*dmParallel, overlap, renumbering, &overlapSF, &dmOverlap);CHKERRQ(ierr); 1446 ierr = DMDestroy(dmParallel);CHKERRQ(ierr); 1447 *dmParallel = dmOverlap; 1448 if (flg) { 1449 ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr); 1450 ierr = PetscSFView(overlapSF, NULL);CHKERRQ(ierr); 1451 } 1452 1453 /* Re-map the pointSF to establish the full migration pattern */ 1454 ierr = PetscSFGetGraph(pointSF, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr); 1455 ierr = PetscSFGetGraph(overlapSF, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr); 1456 ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr); 1457 ierr = PetscSFBcastBegin(overlapSF, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr); 1458 ierr = PetscSFBcastEnd(overlapSF, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr); 1459 ierr = PetscSFCreate(comm, &overlapPointSF);CHKERRQ(ierr); 1460 ierr = PetscSFSetGraph(overlapPointSF, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr); 1461 ierr = PetscSFDestroy(&overlapSF);CHKERRQ(ierr); 1462 ierr = 
PetscSFDestroy(&pointSF);CHKERRQ(ierr); 1463 pointSF = overlapPointSF; 1464 ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap,dm,0,0,0);CHKERRQ(ierr); 1465 } 1466 /* Cleanup Partition */ 1467 ierr = ISLocalToGlobalMappingDestroy(&renumbering);CHKERRQ(ierr); 1468 ierr = PetscSFDestroy(&partSF);CHKERRQ(ierr); 1469 ierr = PetscSectionDestroy(&partSection);CHKERRQ(ierr); 1470 ierr = ISDestroy(&part);CHKERRQ(ierr); 1471 ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr); 1472 ierr = ISDestroy(&cellPart);CHKERRQ(ierr); 1473 /* Copy BC */ 1474 ierr = DMPlexCopyBoundary(dm, *dmParallel);CHKERRQ(ierr); 1475 /* Cleanup */ 1476 if (sf) {*sf = pointSF;} 1477 else {ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);} 1478 ierr = DMSetFromOptions(*dmParallel);CHKERRQ(ierr); 1479 ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr); 1480 PetscFunctionReturn(0); 1481 } 1482 1483 #undef __FUNCT__ 1484 #define __FUNCT__ "DMPlexDistributeOverlap" 1485 /*@C 1486 DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM. 1487 1488 Not Collective 1489 1490 Input Parameters: 1491 + dm - The non-overlapping distributed DMPlex object 1492 . overlap - The overlap of partitions, 0 is the default - renumbering - The ISLocalToGlobalMapping of mesh points from the original point distribution 1493 1494 Output Parameters: 1495 + sf - The PetscSF used for point distribution 1496 - dmOverlap - The overlapping distributed DMPlex object, or NULL 1497 1498 Note: If the mesh was not distributed, the return value is NULL. 1499 1500 The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and 1501 DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function 1502 representation on the mesh.
1503 1504 Level: intermediate 1505 1506 .keywords: mesh, elements 1507 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure() 1508 @*/ 1509 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, ISLocalToGlobalMapping renumbering, PetscSF *sf, DM *dmOverlap) 1510 { 1511 MPI_Comm comm; 1512 PetscMPIInt rank; 1513 PetscSection rootSection, leafSection; 1514 IS rootrank, leafrank; 1515 PetscSection coneSection; 1516 PetscSF overlapSF, migrationSF, pointSF, newPointSF; 1517 PetscSFNode *ghostRemote; 1518 const PetscSFNode *overlapRemote; 1519 ISLocalToGlobalMapping overlapRenumbering; 1520 const PetscInt *renumberingArray, *overlapLocal; 1521 PetscInt dim, p, pStart, pEnd, conesSize, idx; 1522 PetscInt numGhostPoints, numOverlapPoints, numSharedPoints, overlapLeaves; 1523 PetscInt *cones, *ghostLocal, *overlapRenumberingArray, *pointIDs, *recvPointIDs; 1524 PetscErrorCode ierr; 1525 1526 PetscFunctionBegin; 1527 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1528 if (sf) PetscValidPointer(sf, 3); 1529 PetscValidPointer(dmOverlap, 4); 1530 1531 ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr); 1532 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 1533 ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr); 1534 1535 /* Compute point overlap with neighbouring processes on the distributed DM */ 1536 ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr); 1537 ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr); 1538 ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr); 1539 ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr); 1540 ierr = DMPlexCreateOverlap(dm, rootSection, rootrank, leafSection, leafrank, &overlapSF);CHKERRQ(ierr); 1541 ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr); 1542 ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr); 1543 ierr = ISDestroy(&rootrank);CHKERRQ(ierr); 1544 ierr = 
ISDestroy(&leafrank);CHKERRQ(ierr); 1545 ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr); 1546 1547 /* Build dense migration SF that maps the non-overlapping partition to the overlapping one */ 1548 ierr = DMPlexCreateOverlapMigrationSF(dm, overlapSF, &migrationSF);CHKERRQ(ierr); 1549 1550 /* Convert cones to global numbering before migrating them */ 1551 ierr = DMPlexGetConeSection(dm, &coneSection);CHKERRQ(ierr); 1552 ierr = PetscSectionGetStorageSize(coneSection, &conesSize);CHKERRQ(ierr); 1553 ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr); 1554 ierr = ISLocalToGlobalMappingApplyBlock(renumbering, conesSize, cones, cones);CHKERRQ(ierr); 1555 1556 /* Derive the new local-to-global mapping from the old one */ 1557 ierr = PetscSFGetGraph(migrationSF, NULL, &overlapLeaves, &overlapLocal, &overlapRemote);CHKERRQ(ierr); 1558 ierr = PetscMalloc1(overlapLeaves, &overlapRenumberingArray);CHKERRQ(ierr); 1559 ierr = ISLocalToGlobalMappingGetBlockIndices(renumbering, &renumberingArray);CHKERRQ(ierr); 1560 ierr = PetscSFBcastBegin(migrationSF, MPIU_INT, (PetscInt *) renumberingArray, overlapRenumberingArray);CHKERRQ(ierr); 1561 ierr = PetscSFBcastEnd(migrationSF, MPIU_INT, (PetscInt *) renumberingArray, overlapRenumberingArray);CHKERRQ(ierr); 1562 ierr = ISLocalToGlobalMappingCreate(comm, 1, overlapLeaves, (const PetscInt*) overlapRenumberingArray, PETSC_OWN_POINTER, &overlapRenumbering);CHKERRQ(ierr); 1563 1564 /* Build the overlapping DM */ 1565 ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr); 1566 ierr = DMSetDimension(*dmOverlap, dim);CHKERRQ(ierr); 1567 ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr); 1568 ierr = DMPlexDistributeCones(dm, migrationSF, overlapRenumbering, *dmOverlap);CHKERRQ(ierr); 1569 ierr = DMPlexDistributeCoordinates(dm, migrationSF, *dmOverlap);CHKERRQ(ierr); 1570 ierr = DMPlexDistributeLabels(dm, migrationSF, *dmOverlap);CHKERRQ(ierr); 1571 ierr = DMPlexDistributeSetupHybrid(dm, 
migrationSF, overlapRenumbering, *dmOverlap);CHKERRQ(ierr); 1572 1573 /* Build the new point SF by propagating the depthShift generate remote root indices */ 1574 ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr); 1575 ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, NULL, NULL);CHKERRQ(ierr); 1576 ierr = PetscSFGetGraph(overlapSF, NULL, &numOverlapPoints, NULL, NULL);CHKERRQ(ierr); 1577 numGhostPoints = numSharedPoints + numOverlapPoints; 1578 ierr = PetscMalloc1(numGhostPoints, &ghostLocal);CHKERRQ(ierr); 1579 ierr = PetscMalloc1(numGhostPoints, &ghostRemote);CHKERRQ(ierr); 1580 ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr); 1581 ierr = PetscMalloc2(pEnd-pStart, &pointIDs, overlapLeaves, &recvPointIDs);CHKERRQ(ierr); 1582 for (p=0; p<overlapLeaves; p++) { 1583 if (overlapRemote[p].rank == rank) pointIDs[overlapRemote[p].index] = overlapLocal[p]; 1584 } 1585 ierr = PetscSFBcastBegin(migrationSF, MPIU_INT, pointIDs, recvPointIDs);CHKERRQ(ierr); 1586 ierr = PetscSFBcastEnd(migrationSF, MPIU_INT, pointIDs, recvPointIDs);CHKERRQ(ierr); 1587 for (idx=0, p=0; p<overlapLeaves; p++) { 1588 if (overlapRemote[p].rank != rank) { 1589 ghostLocal[idx] = overlapLocal[p]; 1590 ghostRemote[idx].index = recvPointIDs[p]; 1591 ghostRemote[idx].rank = overlapRemote[p].rank; 1592 idx++; 1593 } 1594 } 1595 ierr = DMPlexGetChart(*dmOverlap, &pStart, &pEnd);CHKERRQ(ierr); 1596 ierr = PetscSFCreate(comm, &newPointSF);;CHKERRQ(ierr); 1597 ierr = PetscSFSetGraph(newPointSF, pEnd - pStart, numGhostPoints, ghostLocal, PETSC_OWN_POINTER, ghostRemote, PETSC_OWN_POINTER);CHKERRQ(ierr); 1598 ierr = DMSetPointSF(*dmOverlap, newPointSF);CHKERRQ(ierr); 1599 ierr = PetscSFDestroy(&newPointSF);CHKERRQ(ierr); 1600 /* Cleanup overlap partition */ 1601 ierr = ISLocalToGlobalMappingDestroy(&overlapRenumbering);CHKERRQ(ierr); 1602 ierr = PetscSFDestroy(&overlapSF);CHKERRQ(ierr); 1603 ierr = PetscFree2(pointIDs, recvPointIDs);CHKERRQ(ierr); 1604 if (sf) *sf = migrationSF; 1605 else {ierr = 
PetscSFDestroy(&migrationSF);CHKERRQ(ierr);} 1606 PetscFunctionReturn(0); 1607 } 1608