1 #include <petsc/private/dmpleximpl.h> /*I "petscdmplex.h" I*/ 2 #include <petsc/private/dmlabelimpl.h> /*I "petscdmlabel.h" I*/ 3 4 #undef __FUNCT__ 5 #define __FUNCT__ "DMPlexSetAdjacencyUseCone" 6 /*@ 7 DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first 8 9 Input Parameters: 10 + dm - The DM object 11 - useCone - Flag to use the cone first 12 13 Level: intermediate 14 15 Notes: 16 $ FEM: Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE 17 $ FVM: Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE, useClosure = PETSC_FALSE 18 $ FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE, useClosure = PETSC_TRUE 19 20 .seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator() 21 @*/ 22 PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone) 23 { 24 DM_Plex *mesh = (DM_Plex *) dm->data; 25 26 PetscFunctionBegin; 27 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 28 mesh->useCone = useCone; 29 PetscFunctionReturn(0); 30 } 31 32 #undef __FUNCT__ 33 #define __FUNCT__ "DMPlexGetAdjacencyUseCone" 34 /*@ 35 DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first 36 37 Input Parameter: 38 . dm - The DM object 39 40 Output Parameter: 41 . 
useCone - Flag to use the cone first 42 43 Level: intermediate 44 45 Notes: 46 $ FEM: Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE 47 $ FVM: Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE, useClosure = PETSC_FALSE 48 $ FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE, useClosure = PETSC_TRUE 49 50 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator() 51 @*/ 52 PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone) 53 { 54 DM_Plex *mesh = (DM_Plex *) dm->data; 55 56 PetscFunctionBegin; 57 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 58 PetscValidIntPointer(useCone, 2); 59 *useCone = mesh->useCone; 60 PetscFunctionReturn(0); 61 } 62 63 #undef __FUNCT__ 64 #define __FUNCT__ "DMPlexSetAdjacencyUseClosure" 65 /*@ 66 DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure 67 68 Input Parameters: 69 + dm - The DM object 70 - useClosure - Flag to use the closure 71 72 Level: intermediate 73 74 Notes: 75 $ FEM: Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE 76 $ FVM: Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE, useClosure = PETSC_FALSE 77 $ FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE, useClosure = PETSC_TRUE 78 79 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator() 80 @*/ 81 PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure) 82 { 83 DM_Plex *mesh = (DM_Plex *) dm->data; 84 85 PetscFunctionBegin; 86 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 87 mesh->useClosure = useClosure; 88 PetscFunctionReturn(0); 89 } 90 91 #undef __FUNCT__ 92 #define 
__FUNCT__ "DMPlexGetAdjacencyUseClosure" 93 /*@ 94 DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure 95 96 Input Parameter: 97 . dm - The DM object 98 99 Output Parameter: 100 . useClosure - Flag to use the closure 101 102 Level: intermediate 103 104 Notes: 105 $ FEM: Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE 106 $ FVM: Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE, useClosure = PETSC_FALSE 107 $ FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE, useClosure = PETSC_TRUE 108 109 .seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator() 110 @*/ 111 PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure) 112 { 113 DM_Plex *mesh = (DM_Plex *) dm->data; 114 115 PetscFunctionBegin; 116 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 117 PetscValidIntPointer(useClosure, 2); 118 *useClosure = mesh->useClosure; 119 PetscFunctionReturn(0); 120 } 121 122 #undef __FUNCT__ 123 #define __FUNCT__ "DMPlexSetAdjacencyUseAnchors" 124 /*@ 125 DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints. 126 127 Input Parameters: 128 + dm - The DM object 129 - useAnchors - Flag to use the constraints. If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place. 
130 131 Level: intermediate 132 133 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors() 134 @*/ 135 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors) 136 { 137 DM_Plex *mesh = (DM_Plex *) dm->data; 138 139 PetscFunctionBegin; 140 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 141 mesh->useAnchors = useAnchors; 142 PetscFunctionReturn(0); 143 } 144 145 #undef __FUNCT__ 146 #define __FUNCT__ "DMPlexGetAdjacencyUseAnchors" 147 /*@ 148 DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints. 149 150 Input Parameter: 151 . dm - The DM object 152 153 Output Parameter: 154 . useAnchors - Flag to use the closure. If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place. 155 156 Level: intermediate 157 158 .seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors() 159 @*/ 160 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors) 161 { 162 DM_Plex *mesh = (DM_Plex *) dm->data; 163 164 PetscFunctionBegin; 165 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 166 PetscValidIntPointer(useAnchors, 2); 167 *useAnchors = mesh->useAnchors; 168 PetscFunctionReturn(0); 169 } 170 171 #undef __FUNCT__ 172 #define __FUNCT__ "DMPlexGetAdjacency_Cone_Internal" 173 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[]) 174 { 175 const PetscInt *cone = NULL; 176 PetscInt numAdj = 0, maxAdjSize = *adjSize, coneSize, c; 177 PetscErrorCode ierr; 178 179 PetscFunctionBeginHot; 180 ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr); 181 ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr); 182 for (c = 0; c <= coneSize; ++c) { 183 const PetscInt point = !c ? 
p : cone[c-1]; 184 const PetscInt *support = NULL; 185 PetscInt supportSize, s, q; 186 187 ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr); 188 ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr); 189 for (s = 0; s < supportSize; ++s) { 190 for (q = 0; q < numAdj || (adj[numAdj++] = support[s],0); ++q) { 191 if (support[s] == adj[q]) break; 192 } 193 if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize); 194 } 195 } 196 *adjSize = numAdj; 197 PetscFunctionReturn(0); 198 } 199 200 #undef __FUNCT__ 201 #define __FUNCT__ "DMPlexGetAdjacency_Support_Internal" 202 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[]) 203 { 204 const PetscInt *support = NULL; 205 PetscInt numAdj = 0, maxAdjSize = *adjSize, supportSize, s; 206 PetscErrorCode ierr; 207 208 PetscFunctionBeginHot; 209 ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr); 210 ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr); 211 for (s = 0; s <= supportSize; ++s) { 212 const PetscInt point = !s ? 
p : support[s-1]; 213 const PetscInt *cone = NULL; 214 PetscInt coneSize, c, q; 215 216 ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr); 217 ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr); 218 for (c = 0; c < coneSize; ++c) { 219 for (q = 0; q < numAdj || (adj[numAdj++] = cone[c],0); ++q) { 220 if (cone[c] == adj[q]) break; 221 } 222 if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize); 223 } 224 } 225 *adjSize = numAdj; 226 PetscFunctionReturn(0); 227 } 228 229 #undef __FUNCT__ 230 #define __FUNCT__ "DMPlexGetAdjacency_Transitive_Internal" 231 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[]) 232 { 233 PetscInt *star = NULL; 234 PetscInt numAdj = 0, maxAdjSize = *adjSize, starSize, s; 235 PetscErrorCode ierr; 236 237 PetscFunctionBeginHot; 238 ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr); 239 for (s = 0; s < starSize*2; s += 2) { 240 const PetscInt *closure = NULL; 241 PetscInt closureSize, c, q; 242 243 ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr); 244 for (c = 0; c < closureSize*2; c += 2) { 245 for (q = 0; q < numAdj || (adj[numAdj++] = closure[c],0); ++q) { 246 if (closure[c] == adj[q]) break; 247 } 248 if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize); 249 } 250 ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr); 251 } 252 ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr); 253 *adjSize = numAdj; 254 PetscFunctionReturn(0); 255 } 256 257 #undef __FUNCT__ 258 #define __FUNCT__ "DMPlexGetAdjacency_Internal" 259 PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, 
PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[]) 260 { 261 static PetscInt asiz = 0; 262 PetscInt maxAnchors = 1; 263 PetscInt aStart = -1, aEnd = -1; 264 PetscInt maxAdjSize; 265 PetscSection aSec = NULL; 266 IS aIS = NULL; 267 const PetscInt *anchors; 268 PetscErrorCode ierr; 269 270 PetscFunctionBeginHot; 271 if (useAnchors) { 272 ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr); 273 if (aSec) { 274 ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr); 275 maxAnchors = PetscMax(1,maxAnchors); 276 ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr); 277 ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr); 278 } 279 } 280 if (!*adj) { 281 PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd; 282 283 ierr = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr); 284 ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr); 285 ierr = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr); 286 coneSeries = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1; 287 supportSeries = (maxS > 1) ? 
((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1; 288 asiz = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries); 289 asiz *= maxAnchors; 290 asiz = PetscMin(asiz,pEnd-pStart); 291 ierr = PetscMalloc1(asiz,adj);CHKERRQ(ierr); 292 } 293 if (*adjSize < 0) *adjSize = asiz; 294 maxAdjSize = *adjSize; 295 if (useTransitiveClosure) { 296 ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr); 297 } else if (useCone) { 298 ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr); 299 } else { 300 ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr); 301 } 302 if (useAnchors && aSec) { 303 PetscInt origSize = *adjSize; 304 PetscInt numAdj = origSize; 305 PetscInt i = 0, j; 306 PetscInt *orig = *adj; 307 308 while (i < origSize) { 309 PetscInt p = orig[i]; 310 PetscInt aDof = 0; 311 312 if (p >= aStart && p < aEnd) { 313 ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr); 314 } 315 if (aDof) { 316 PetscInt aOff; 317 PetscInt s, q; 318 319 for (j = i + 1; j < numAdj; j++) { 320 orig[j - 1] = orig[j]; 321 } 322 origSize--; 323 numAdj--; 324 ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr); 325 for (s = 0; s < aDof; ++s) { 326 for (q = 0; q < numAdj || (orig[numAdj++] = anchors[aOff+s],0); ++q) { 327 if (anchors[aOff+s] == orig[q]) break; 328 } 329 if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize); 330 } 331 } 332 else { 333 i++; 334 } 335 } 336 *adjSize = numAdj; 337 ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr); 338 } 339 PetscFunctionReturn(0); 340 } 341 342 #undef __FUNCT__ 343 #define __FUNCT__ "DMPlexGetAdjacency" 344 /*@ 345 DMPlexGetAdjacency - Return all points adjacent to the given point 346 347 Input Parameters: 348 + dm - The DM object 349 . p - The point 350 . 
adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE 351 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize 352 353 Output Parameters: 354 + adjSize - The number of adjacent points 355 - adj - The adjacent points 356 357 Level: advanced 358 359 Notes: The user must PetscFree the adj array if it was not passed in. 360 361 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator() 362 @*/ 363 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[]) 364 { 365 DM_Plex *mesh = (DM_Plex *) dm->data; 366 PetscErrorCode ierr; 367 368 PetscFunctionBeginHot; 369 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 370 PetscValidPointer(adjSize,3); 371 PetscValidPointer(adj,4); 372 ierr = DMPlexGetAdjacency_Internal(dm, p, mesh->useCone, mesh->useClosure, mesh->useAnchors, adjSize, adj);CHKERRQ(ierr); 373 PetscFunctionReturn(0); 374 } 375 376 #undef __FUNCT__ 377 #define __FUNCT__ "DMPlexCreateTwoSidedProcessSF" 378 /*@ 379 DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity 380 381 Collective on DM 382 383 Input Parameters: 384 + dm - The DM 385 - sfPoint - The PetscSF which encodes point connectivity 386 387 Output Parameters: 388 + processRanks - A list of process neighbors, or NULL 389 - sfProcess - An SF encoding the two-sided process connectivity, or NULL 390 391 Level: developer 392 393 .seealso: PetscSFCreate(), DMPlexCreateProcessSF() 394 @*/ 395 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess) 396 { 397 const PetscSFNode *remotePoints; 398 PetscInt *localPointsNew; 399 PetscSFNode *remotePointsNew; 400 const PetscInt *nranks; 401 PetscInt *ranksNew; 402 PetscBT neighbors; 403 PetscInt pStart, pEnd, p, numLeaves, l, numNeighbors, n; 404 
PetscMPIInt numProcs, proc, rank; 405 PetscErrorCode ierr; 406 407 PetscFunctionBegin; 408 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 409 PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2); 410 if (processRanks) {PetscValidPointer(processRanks, 3);} 411 if (sfProcess) {PetscValidPointer(sfProcess, 4);} 412 ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr); 413 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr); 414 ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr); 415 ierr = PetscBTCreate(numProcs, &neighbors);CHKERRQ(ierr); 416 ierr = PetscBTMemzero(numProcs, neighbors);CHKERRQ(ierr); 417 /* Compute root-to-leaf process connectivity */ 418 ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr); 419 ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr); 420 for (p = pStart; p < pEnd; ++p) { 421 PetscInt ndof, noff, n; 422 423 ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr); 424 ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr); 425 for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);} 426 } 427 ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr); 428 /* Compute leaf-to-neighbor process connectivity */ 429 ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr); 430 ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr); 431 for (p = pStart; p < pEnd; ++p) { 432 PetscInt ndof, noff, n; 433 434 ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr); 435 ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr); 436 for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);} 437 } 438 ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr); 439 /* Compute leaf-to-root process connectivity */ 440 for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);} 441 /* Calculate edges */ 442 
PetscBTClear(neighbors, rank); 443 for(proc = 0, numNeighbors = 0; proc < numProcs; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;} 444 ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr); 445 ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr); 446 ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr); 447 for(proc = 0, n = 0; proc < numProcs; ++proc) { 448 if (PetscBTLookup(neighbors, proc)) { 449 ranksNew[n] = proc; 450 localPointsNew[n] = proc; 451 remotePointsNew[n].index = rank; 452 remotePointsNew[n].rank = proc; 453 ++n; 454 } 455 } 456 ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr); 457 if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);} 458 else {ierr = PetscFree(ranksNew);CHKERRQ(ierr);} 459 if (sfProcess) { 460 ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr); 461 ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr); 462 ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr); 463 ierr = PetscSFSetGraph(*sfProcess, numProcs, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr); 464 } 465 PetscFunctionReturn(0); 466 } 467 468 #undef __FUNCT__ 469 #define __FUNCT__ "DMPlexDistributeOwnership" 470 /*@ 471 DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF. 472 473 Collective on DM 474 475 Input Parameter: 476 . dm - The DM 477 478 Output Parameters: 479 + rootSection - The number of leaves for a given root point 480 . rootrank - The rank of each edge into the root point 481 . 
leafSection - The number of processes sharing a given leaf point
- leafrank - The rank of each process sharing a leaf point

  Level: developer

.seealso: DMPlexCreateOverlap()
@*/
PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
{
  MPI_Comm        comm;
  PetscSF         sfPoint;
  const PetscInt *rootdegree;
  PetscInt       *myrank, *remoterank;
  PetscInt        pStart, pEnd, p, nedges;
  PetscMPIInt     rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* Compute number of leaves for each root */
  ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  /* Gather rank of each leaf to root: every leaf sends its own rank to its root */
  ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
  ierr = PetscMalloc1(nedges, &remoterank);CHKERRQ(ierr);
  for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
  ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscFree(myrank);CHKERRQ(ierr);
  /* remoterank ownership transfers to the IS (PETSC_OWN_POINTER) */
  ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
  /* Distribute remote ranks to leaves */
  ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
  ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexCreateOverlap"
/*@C
  DMPlexCreateOverlap - Compute a label marking, for each neighboring process, the points this process contributes to that process's overlap, using the ownership information from DMPlexDistributeOwnership().

  Collective on DM

  Input Parameters:
+ dm - The DM
. levels - Number of overlap levels
. rootSection - The number of leaves for a given root point
. rootrank - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank - The rank of each process sharing a leaf point

  Output Parameter:
. ovLabel - DMLabel containing remote overlap contributions as point/rank pairings

  Level: developer

.seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
@*/
PetscErrorCode DMPlexCreateOverlap(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
{
  MPI_Comm           comm;
  DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
  PetscSF            sfPoint, sfProc;
  const PetscSFNode *remote;
  const PetscInt    *local;
  const PetscInt    *nrank, *rrank;
  PetscInt          *adj = NULL;
  PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
  PetscMPIInt        rank, numProcs;
  PetscBool          useCone, useClosure, flg;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  ierr = DMLabelCreate("Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
  /* Handle leaves: shared with the root point */
  for (l = 0; l < nleaves; ++l) {
    PetscInt adjSize = PETSC_DETERMINE, a;

    /* adj is reused (and grown once) across iterations; freed after the root loop */
    ierr = DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
  }
  ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
  ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
  /* Handle roots */
  for (p = pStart; p < pEnd; ++p) {
    PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;

    if ((p >= sStart) && (p < sEnd)) {
      /* Some leaves share a root with other leaves on different processes */
      ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
      if (neighbors) {
        ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
        ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
        for (n = 0; n < neighbors; ++n) {
          const PetscInt remoteRank = nrank[noff+n];

          if (remoteRank == rank) continue;
          for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
        }
      }
    }
    /* Roots are shared with leaves */
    ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
    if (!neighbors) continue;
    ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (n = 0; n < neighbors; ++n) {
      const PetscInt remoteRank = rrank[noff+n];

      if (remoteRank == rank) continue;
      for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
    }
  }
  ierr = PetscFree(adj);CHKERRQ(ierr);
  ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
  ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
  /* Add additional overlap levels */
  for (l = 1; l < levels; l++) {
    /* Propagate point donations over SF to capture remote connections */
    ierr = DMPlexPartitionLabelPropagate(dm, ovAdjByRank);CHKERRQ(ierr);
    /* Add next level of point donations to the label */
    ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr);
  }
  /* We require the closure in the overlap */
  ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
  if (useCone || !useClosure) {
    ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr);
  }
  ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }
  /* Make global process SF and invert sender to receiver label */
  {
    /* Build a global process SF */
    PetscSFNode *remoteProc;
    ierr = PetscMalloc1(numProcs, &remoteProc);CHKERRQ(ierr);
    for (p = 0; p < numProcs; ++p) {
      remoteProc[p].rank  = p;
      remoteProc[p].index = rank;
    }
    ierr = PetscSFCreate(comm, &sfProc);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) sfProc, "Process SF");CHKERRQ(ierr);
    ierr = PetscSFSetGraph(sfProc, numProcs, numProcs, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
  }
  ierr = DMLabelCreate("Overlap label", ovLabel);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, sfProc, *ovLabel);CHKERRQ(ierr);
  /* Add owned points, except for shared local points */
  for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
  for (l = 0; l < nleaves; ++l) {
    ierr = DMLabelClearValue(*ovLabel, local[l], rank);CHKERRQ(ierr);
    ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
  }
  /* Clean up */
  ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexCreateOverlapMigrationSF"
/* Build the SF that migrates overlap points into this DM: each local point maps to
   itself (or its current remote root, if shared), and each incoming overlap leaf is
   appended within its depth stratum so the receiving DM remains stratified. */
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, numProcs;
  PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscSFNode       *iremote;
  PetscSF            pointSF;
  const PetscInt    *sharedLocal;
  const PetscSFNode *overlapRemote, *sharedRemote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);

  /* Before building the migration SF we need to know the new stratum offsets */
  ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
  }
  for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
  ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);

  /* Count received points in each stratum and compute the internal strata shift.
     The shift reflects the DMPlex numbering convention: cells (depth dim) first,
     then vertices (depth 0), then the intermediate strata. */
  ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) depthRecv[d]=0;
  for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
  depthShift[dim] = 0;
  for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
  for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
  for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    depthIdx[d] = pStart + depthShift[d];
  }

  /* Form the overlap SF build an SF that describes the full overlap migration SF */
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  newLeaves = pEnd - pStart + nleaves;
  ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
  /* First map local points to themselves */
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) {
      point = p + depthShift[d];
      ilocal[point] = point;
      iremote[point].index = p;
      iremote[point].rank = rank;
      depthIdx[d]++;
    }
  }

  /* Add in the remote roots for currently shared points */
  ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=0; p<numSharedPoints; p++) {
      if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
        point = sharedLocal[p] + depthShift[d];
        iremote[point].index = sharedRemote[p].index;
        iremote[point].rank = sharedRemote[p].rank;
      }
    }
  }

  /* Now add the incoming overlap points, appended at the end of their stratum */
  for (p=0; p<nleaves; p++) {
    point = depthIdx[remoteDepths[p]];
    ilocal[point] = point;
    iremote[point].index = overlapRemote[p].index;
    iremote[point].rank = overlapRemote[p].rank;
    depthIdx[remoteDepths[p]]++;
  }
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);

  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);

  ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexStratifyMigrationSF"
/*@
  DMPlexStratifyMigrationSF - Rearrange the leaves of a migration SF so that the DM it produces is stratified, i.e. points of equal depth are numbered contiguously

  Input Parameters:
+ dm - The DM
- sf - A star forest with non-ordered leaves, usually defining a DM point migration

  Output Parameter:
. migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap()
@*/
PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, numProcs;
  PetscInt           d, ldepth, depth, p, pStart, pEnd, nroots, nleaves;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  const PetscSFNode *iremote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &ldepth);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
  if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", ldepth, depth);

  /* Before building the migration SF we need to know the new stratum offsets */
  ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
  for (d = 0; d < depth+1; ++d) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p = pStart; p < pEnd; ++p) pointDepths[p] = d;
  }
  for (p = 0; p < nleaves; ++p) remoteDepths[p] = -1;
  ierr = PetscSFBcastBegin(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  /* Count received points in each stratum and compute the internal strata shift
     (same stratum ordering convention as DMPlexCreateOverlapMigrationSF above) */
  ierr = PetscMalloc3(depth+1, &depthRecv, depth+1, &depthShift, depth+1, &depthIdx);CHKERRQ(ierr);
  for (d = 0; d < depth+1; ++d) depthRecv[d] = 0;
  for (p = 0; p < nleaves; ++p) depthRecv[remoteDepths[p]]++;
  depthShift[depth] = 0;
  for (d = 0; d < depth; ++d) depthShift[d] = depthRecv[depth];
  for (d = 1; d < depth; ++d) depthShift[d] += depthRecv[0];
  for (d = depth-2; d > 0; --d) depthShift[d] += depthRecv[d+1];
  for (d = 0; d < depth+1; ++d) {depthIdx[d] = 0;}
  /* Derive a new local permutation based on stratified indices */
  ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr);
  for (p = 0; p < nleaves; ++p) {
    const PetscInt dep = remoteDepths[p];

    ilocal[p] = depthShift[dep] + depthIdx[dep];
    depthIdx[dep]++;
  }
  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr);
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
  ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeField"
/*@
  DMPlexDistributeField - Distribute
field data to match a given PetscSF, usually the SF from mesh distribution

  Collective on DM

  Input Parameters:
+ dm - The DMPlex object
. pointSF - The PetscSF describing the communication pattern
. originalSection - The PetscSection for existing data layout
- originalVec - The existing data

  Output Parameters:
+ newSection - The PetscSection describing the new data layout
- newVec - The new data

  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
@*/
PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
{
  PetscSF        fieldSF;
  PetscInt      *remoteOffsets, fieldSize;
  PetscScalar   *originalValues, *newValues;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  /* Build the new layout, then size the new vector from its storage */
  ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);

  ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
  ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);

  /* Move the dof values across with a section SF */
  ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
  ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
  ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
  ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
  ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
  ierr =
PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr); 869 PetscFunctionReturn(0); 870 } 871 872 #undef __FUNCT__ 873 #define __FUNCT__ "DMPlexDistributeFieldIS" 874 /*@ 875 DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution 876 877 Collective on DM 878 879 Input Parameters: 880 + dm - The DMPlex object 881 . pointSF - The PetscSF describing the communication pattern 882 . originalSection - The PetscSection for existing data layout 883 - originalIS - The existing data 884 885 Output Parameters: 886 + newSection - The PetscSF describing the new data layout 887 - newIS - The new data 888 889 Level: developer 890 891 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData() 892 @*/ 893 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS) 894 { 895 PetscSF fieldSF; 896 PetscInt *newValues, *remoteOffsets, fieldSize; 897 const PetscInt *originalValues; 898 PetscErrorCode ierr; 899 900 PetscFunctionBegin; 901 ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr); 902 ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr); 903 904 ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr); 905 ierr = PetscMalloc1(fieldSize, &newValues);CHKERRQ(ierr); 906 907 ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr); 908 ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr); 909 ierr = PetscFree(remoteOffsets);CHKERRQ(ierr); 910 ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr); 911 ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr); 912 ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr); 913 ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr); 914 ierr = 
ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr); 915 ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr); 916 PetscFunctionReturn(0); 917 } 918 919 #undef __FUNCT__ 920 #define __FUNCT__ "DMPlexDistributeData" 921 /*@ 922 DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution 923 924 Collective on DM 925 926 Input Parameters: 927 + dm - The DMPlex object 928 . pointSF - The PetscSF describing the communication pattern 929 . originalSection - The PetscSection for existing data layout 930 . datatype - The type of data 931 - originalData - The existing data 932 933 Output Parameters: 934 + newSection - The PetscSection describing the new data layout 935 - newData - The new data 936 937 Level: developer 938 939 .seealso: DMPlexDistribute(), DMPlexDistributeField() 940 @*/ 941 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData) 942 { 943 PetscSF fieldSF; 944 PetscInt *remoteOffsets, fieldSize; 945 PetscMPIInt dataSize; 946 PetscErrorCode ierr; 947 948 PetscFunctionBegin; 949 ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr); 950 ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr); 951 952 ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr); 953 ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr); 954 ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr); 955 956 ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr); 957 ierr = PetscFree(remoteOffsets);CHKERRQ(ierr); 958 ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr); 959 ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr); 960 ierr = 
PetscSFDestroy(&fieldSF);CHKERRQ(ierr); 961 ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr); 962 PetscFunctionReturn(0); 963 } 964 965 #undef __FUNCT__ 966 #define __FUNCT__ "DMPlexDistributeCones" 967 PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel) 968 { 969 DM_Plex *mesh = (DM_Plex*) dm->data; 970 DM_Plex *pmesh = (DM_Plex*) (dmParallel)->data; 971 MPI_Comm comm; 972 PetscSF coneSF; 973 PetscSection originalConeSection, newConeSection; 974 PetscInt *remoteOffsets, *cones, *globCones, *newCones, newConesSize; 975 PetscBool flg; 976 PetscErrorCode ierr; 977 978 PetscFunctionBegin; 979 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 980 PetscValidPointer(dmParallel,4); 981 ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr); 982 983 /* Distribute cone section */ 984 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr); 985 ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr); 986 ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr); 987 ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr); 988 ierr = DMSetUp(dmParallel);CHKERRQ(ierr); 989 { 990 PetscInt pStart, pEnd, p; 991 992 ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr); 993 for (p = pStart; p < pEnd; ++p) { 994 PetscInt coneSize; 995 ierr = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr); 996 pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize); 997 } 998 } 999 /* Communicate and renumber cones */ 1000 ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr); 1001 ierr = PetscFree(remoteOffsets);CHKERRQ(ierr); 1002 ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr); 1003 if (original) { 1004 PetscInt numCones; 1005 1006 ierr = 
PetscSectionGetStorageSize(originalConeSection,&numCones);CHKERRQ(ierr); ierr = PetscMalloc1(numCones,&globCones);CHKERRQ(ierr); 1007 ierr = ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones);CHKERRQ(ierr); 1008 } 1009 else { 1010 globCones = cones; 1011 } 1012 ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr); 1013 ierr = PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr); 1014 ierr = PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr); 1015 if (original) { 1016 ierr = PetscFree(globCones);CHKERRQ(ierr); 1017 } 1018 ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr); 1019 ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr); 1020 #if PETSC_USE_DEBUG 1021 { 1022 PetscInt p; 1023 PetscBool valid = PETSC_TRUE; 1024 for (p = 0; p < newConesSize; ++p) { 1025 if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);} 1026 } 1027 if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map"); 1028 } 1029 #endif 1030 ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr); 1031 if (flg) { 1032 ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr); 1033 ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 1034 ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr); 1035 ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 1036 ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr); 1037 } 1038 ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr); 1039 ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr); 1040 ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr); 1041 ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, 
newCones);CHKERRQ(ierr); 1042 ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr); 1043 ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr); 1044 /* Create supports and stratify sieve */ 1045 { 1046 PetscInt pStart, pEnd; 1047 1048 ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr); 1049 ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr); 1050 } 1051 ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr); 1052 ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr); 1053 pmesh->useCone = mesh->useCone; 1054 pmesh->useClosure = mesh->useClosure; 1055 PetscFunctionReturn(0); 1056 } 1057 1058 #undef __FUNCT__ 1059 #define __FUNCT__ "DMPlexDistributeCoordinates" 1060 PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel) 1061 { 1062 MPI_Comm comm; 1063 PetscSection originalCoordSection, newCoordSection; 1064 Vec originalCoordinates, newCoordinates; 1065 PetscInt bs; 1066 const char *name; 1067 const PetscReal *maxCell, *L; 1068 const DMBoundaryType *bd; 1069 PetscErrorCode ierr; 1070 1071 PetscFunctionBegin; 1072 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1073 PetscValidPointer(dmParallel, 3); 1074 1075 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr); 1076 ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr); 1077 ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr); 1078 ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr); 1079 if (originalCoordinates) { 1080 ierr = VecCreate(comm, &newCoordinates);CHKERRQ(ierr); 1081 ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr); 1082 ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr); 1083 1084 ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr); 1085 ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr); 1086 ierr = 
VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr); 1087 ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr); 1088 ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr); 1089 } 1090 ierr = DMGetPeriodicity(dm, &maxCell, &L, &bd);CHKERRQ(ierr); 1091 if (L) {ierr = DMSetPeriodicity(dmParallel, maxCell, L, bd);CHKERRQ(ierr);} 1092 PetscFunctionReturn(0); 1093 } 1094 1095 #undef __FUNCT__ 1096 #define __FUNCT__ "DMPlexDistributeLabels" 1097 /* Here we are assuming that process 0 always has everything */ 1098 PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel) 1099 { 1100 DM_Plex *mesh = (DM_Plex*) dm->data; 1101 MPI_Comm comm; 1102 DMLabel depthLabel; 1103 PetscMPIInt rank; 1104 PetscInt depth, d, numLabels, numLocalLabels, l; 1105 PetscBool hasLabels = PETSC_FALSE, lsendDepth, sendDepth; 1106 PetscObjectState depthState = -1; 1107 PetscErrorCode ierr; 1108 1109 PetscFunctionBegin; 1110 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1111 PetscValidHeaderSpecific(dm, DM_CLASSID, 3); 1112 ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr); 1113 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr); 1114 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 1115 1116 /* If the user has changed the depth label, communicate it instead */ 1117 ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr); 1118 ierr = DMPlexGetDepthLabel(dm, &depthLabel);CHKERRQ(ierr); 1119 if (depthLabel) {ierr = DMLabelGetState(depthLabel, &depthState);CHKERRQ(ierr);} 1120 lsendDepth = mesh->depthState != depthState ? 
PETSC_TRUE : PETSC_FALSE; 1121 ierr = MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr); 1122 if (sendDepth) { 1123 ierr = DMRemoveLabel(dmParallel, "depth", &depthLabel);CHKERRQ(ierr); 1124 ierr = DMLabelDestroy(&depthLabel);CHKERRQ(ierr); 1125 } 1126 /* Everyone must have either the same number of labels, or none */ 1127 ierr = DMGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr); 1128 numLabels = numLocalLabels; 1129 ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr); 1130 if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE; 1131 for (l = numLabels-1; l >= 0; --l) { 1132 DMLabel label = NULL, labelNew = NULL; 1133 PetscBool isdepth; 1134 1135 if (hasLabels) { 1136 ierr = DMGetLabelByNum(dm, l, &label);CHKERRQ(ierr); 1137 /* Skip "depth" because it is recreated */ 1138 ierr = PetscStrcmp(label->name, "depth", &isdepth);CHKERRQ(ierr); 1139 } 1140 ierr = MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr); 1141 if (isdepth && !sendDepth) continue; 1142 ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr); 1143 if (isdepth) { 1144 /* Put in any missing strata which can occur if users are managing the depth label themselves */ 1145 PetscInt gdepth; 1146 1147 ierr = MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr); 1148 if ((depth >= 0) && (gdepth != depth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", depth, gdepth); 1149 for (d = 0; d <= gdepth; ++d) { 1150 PetscBool has; 1151 1152 ierr = DMLabelHasStratum(labelNew, d, &has);CHKERRQ(ierr); 1153 if (!has) ierr = DMLabelAddStratum(labelNew, d);CHKERRQ(ierr); 1154 } 1155 } 1156 ierr = DMAddLabel(dmParallel, labelNew);CHKERRQ(ierr); 1157 } 1158 ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr); 1159 PetscFunctionReturn(0); 1160 } 1161 1162 #undef __FUNCT__ 1163 #define __FUNCT__ "DMPlexDistributeSetupHybrid" 1164 PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, 
PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex        *mesh  = (DM_Plex*) dm->data;
  DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm        comm;
  const PetscInt *gpoints;
  PetscInt        dim, depth, n, d;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmParallel, 4);

  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);

  /* Setup hybrid structure: seed from the local mesh, then take rank 0's values */
  for (d = 0; d <= dim; ++d) {pmesh->hybridPointMax[d] = mesh->hybridPointMax[d];}
  ierr = MPI_Bcast(pmesh->hybridPointMax, dim+1, MPIU_INT, 0, comm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(renumbering, &n);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(renumbering, &gpoints);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  for (d = 0; d <= dim; ++d) {
    PetscInt pmax = pmesh->hybridPointMax[d], newmax = 0, pEnd, stratum[2], p;

    if (pmax < 0) continue;
    ierr = DMPlexGetDepthStratum(dm, d > depth ?
depth : d, &stratum[0], &stratum[1]);CHKERRQ(ierr); 1191 ierr = DMPlexGetDepthStratum(dmParallel, d, NULL, &pEnd);CHKERRQ(ierr); 1192 ierr = MPI_Bcast(stratum, 2, MPIU_INT, 0, comm);CHKERRQ(ierr); 1193 for (p = 0; p < n; ++p) { 1194 const PetscInt point = gpoints[p]; 1195 1196 if ((point >= stratum[0]) && (point < stratum[1]) && (point >= pmax)) ++newmax; 1197 } 1198 if (newmax > 0) pmesh->hybridPointMax[d] = pEnd - newmax; 1199 else pmesh->hybridPointMax[d] = -1; 1200 } 1201 ierr = ISLocalToGlobalMappingRestoreIndices(renumbering, &gpoints);CHKERRQ(ierr); 1202 PetscFunctionReturn(0); 1203 } 1204 1205 #undef __FUNCT__ 1206 #define __FUNCT__ "DMPlexDistributeSetupTree" 1207 PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel) 1208 { 1209 DM_Plex *mesh = (DM_Plex*) dm->data; 1210 DM_Plex *pmesh = (DM_Plex*) (dmParallel)->data; 1211 MPI_Comm comm; 1212 DM refTree; 1213 PetscSection origParentSection, newParentSection; 1214 PetscInt *origParents, *origChildIDs; 1215 PetscBool flg; 1216 PetscErrorCode ierr; 1217 1218 PetscFunctionBegin; 1219 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1220 PetscValidHeaderSpecific(dm, DM_CLASSID, 4); 1221 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr); 1222 1223 /* Set up tree */ 1224 ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr); 1225 ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr); 1226 ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr); 1227 if (origParentSection) { 1228 PetscInt pStart, pEnd; 1229 PetscInt *newParents, *newChildIDs, *globParents; 1230 PetscInt *remoteOffsetsParents, newParentSize; 1231 PetscSF parentSF; 1232 1233 ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr); 1234 ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr); 1235 ierr = 
PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr); 1236 ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr); 1237 ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr); 1238 ierr = PetscFree(remoteOffsetsParents);CHKERRQ(ierr); 1239 ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr); 1240 ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr); 1241 if (original) { 1242 PetscInt numParents; 1243 1244 ierr = PetscSectionGetStorageSize(origParentSection,&numParents);CHKERRQ(ierr); 1245 ierr = PetscMalloc1(numParents,&globParents);CHKERRQ(ierr); 1246 ierr = ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents);CHKERRQ(ierr); 1247 } 1248 else { 1249 globParents = origParents; 1250 } 1251 ierr = PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr); 1252 ierr = PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr); 1253 if (original) { 1254 ierr = PetscFree(globParents);CHKERRQ(ierr); 1255 } 1256 ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr); 1257 ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr); 1258 ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr); 1259 #if PETSC_USE_DEBUG 1260 { 1261 PetscInt p; 1262 PetscBool valid = PETSC_TRUE; 1263 for (p = 0; p < newParentSize; ++p) { 1264 if (newParents[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);} 1265 } 1266 if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map"); 1267 } 1268 #endif 1269 ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-parents_view", 
&flg);CHKERRQ(ierr); 1270 if (flg) { 1271 ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr); 1272 ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 1273 ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr); 1274 ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 1275 ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr); 1276 } 1277 ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr); 1278 ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr); 1279 ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr); 1280 ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr); 1281 } 1282 pmesh->useAnchors = mesh->useAnchors; 1283 PetscFunctionReturn(0); 1284 } 1285 1286 #undef __FUNCT__ 1287 #define __FUNCT__ "DMPlexDistributeSF" 1288 PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel) 1289 { 1290 DM_Plex *mesh = (DM_Plex*) dm->data; 1291 DM_Plex *pmesh = (DM_Plex*) (dmParallel)->data; 1292 PetscMPIInt rank, numProcs; 1293 MPI_Comm comm; 1294 PetscErrorCode ierr; 1295 1296 PetscFunctionBegin; 1297 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1298 PetscValidPointer(dmParallel,7); 1299 1300 /* Create point SF for parallel mesh */ 1301 ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr); 1302 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr); 1303 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 1304 ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr); 1305 { 1306 const PetscInt *leaves; 1307 PetscSFNode *remotePoints, *rowners, *lowners; 1308 PetscInt numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints; 1309 PetscInt pStart, pEnd; 1310 1311 ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr); 1312 ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr); 1313 ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr); 1314 for 
(p=0; p<numRoots; p++) { 1315 rowners[p].rank = -1; 1316 rowners[p].index = -1; 1317 } 1318 ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr); 1319 ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr); 1320 for (p = 0; p < numLeaves; ++p) { 1321 if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */ 1322 lowners[p].rank = rank; 1323 lowners[p].index = leaves ? leaves[p] : p; 1324 } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */ 1325 lowners[p].rank = -2; 1326 lowners[p].index = -2; 1327 } 1328 } 1329 for (p=0; p<numRoots; p++) { /* Root must not participate in the rediction, flag so that MAXLOC does not use */ 1330 rowners[p].rank = -3; 1331 rowners[p].index = -3; 1332 } 1333 ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr); 1334 ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr); 1335 ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr); 1336 ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr); 1337 for (p = 0; p < numLeaves; ++p) { 1338 if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed"); 1339 if (lowners[p].rank != rank) ++numGhostPoints; 1340 } 1341 ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr); 1342 ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr); 1343 for (p = 0, gp = 0; p < numLeaves; ++p) { 1344 if (lowners[p].rank != rank) { 1345 ghostPoints[gp] = leaves ? 
leaves[p] : p; 1346 remotePoints[gp].rank = lowners[p].rank; 1347 remotePoints[gp].index = lowners[p].index; 1348 ++gp; 1349 } 1350 } 1351 ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr); 1352 ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr); 1353 ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr); 1354 } 1355 pmesh->useCone = mesh->useCone; 1356 pmesh->useClosure = mesh->useClosure; 1357 ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr); 1358 PetscFunctionReturn(0); 1359 } 1360 1361 #undef __FUNCT__ 1362 #define __FUNCT__ "DMPlexCreatePointSF" 1363 /*@C 1364 DMPlexDerivePointSF - Build a point SF from an SF describing a point migration 1365 1366 Input Parameter: 1367 + dm - The source DMPlex object 1368 . migrationSF - The star forest that describes the parallel point remapping 1369 . ownership - Flag causing a vote to determine point ownership 1370 1371 Output Parameter: 1372 - pointSF - The star forest describing the point overlap in the remapped DM 1373 1374 Level: developer 1375 1376 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap() 1377 @*/ 1378 PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF) 1379 { 1380 PetscMPIInt rank; 1381 PetscInt p, nroots, nleaves, idx, npointLeaves; 1382 PetscInt *pointLocal; 1383 const PetscInt *leaves; 1384 const PetscSFNode *roots; 1385 PetscSFNode *rootNodes, *leafNodes, *pointRemote; 1386 PetscErrorCode ierr; 1387 1388 PetscFunctionBegin; 1389 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1390 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr); 1391 1392 ierr = PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);CHKERRQ(ierr); 1393 ierr = PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes);CHKERRQ(ierr); 1394 if (ownership) { 1395 /* Point ownership vote: Process with highest rank ownes shared points */ 
1396 for (p = 0; p < nleaves; ++p) { 1397 /* Either put in a bid or we know we own it */ 1398 leafNodes[p].rank = rank; 1399 leafNodes[p].index = p; 1400 } 1401 for (p = 0; p < nroots; p++) { 1402 /* Root must not participate in the reduction, flag so that MAXLOC does not use */ 1403 rootNodes[p].rank = -3; 1404 rootNodes[p].index = -3; 1405 } 1406 ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr); 1407 ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr); 1408 } else { 1409 for (p = 0; p < nroots; p++) { 1410 rootNodes[p].index = -1; 1411 rootNodes[p].rank = rank; 1412 }; 1413 for (p = 0; p < nleaves; p++) { 1414 /* Write new local id into old location */ 1415 if (roots[p].rank == rank) { 1416 rootNodes[roots[p].index].index = leaves ? leaves[p] : p; 1417 } 1418 } 1419 } 1420 ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr); 1421 ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr); 1422 1423 for (npointLeaves = 0, p = 0; p < nleaves; p++) {if (leafNodes[p].rank != rank) npointLeaves++;} 1424 ierr = PetscMalloc1(npointLeaves, &pointLocal);CHKERRQ(ierr); 1425 ierr = PetscMalloc1(npointLeaves, &pointRemote);CHKERRQ(ierr); 1426 for (idx = 0, p = 0; p < nleaves; p++) { 1427 if (leafNodes[p].rank != rank) { 1428 pointLocal[idx] = p; 1429 pointRemote[idx] = leafNodes[p]; 1430 idx++; 1431 } 1432 } 1433 ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);CHKERRQ(ierr); 1434 ierr = PetscSFSetFromOptions(*pointSF);CHKERRQ(ierr); 1435 ierr = PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);CHKERRQ(ierr); 1436 ierr = PetscFree2(rootNodes, leafNodes);CHKERRQ(ierr); 1437 PetscFunctionReturn(0); 1438 } 1439 1440 #undef __FUNCT__ 1441 #define __FUNCT__ "DMPlexMigrate" 1442 /*@C 1443 DMPlexMigrate - Migrates internal DM data over the supplied star 
forest 1444 1445 Input Parameter: 1446 + dm - The source DMPlex object 1447 . sf - The star forest communication context describing the migration pattern 1448 1449 Output Parameter: 1450 - targetDM - The target DMPlex object 1451 1452 Level: intermediate 1453 1454 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap() 1455 @*/ 1456 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM) 1457 { 1458 MPI_Comm comm; 1459 PetscInt dim, nroots; 1460 PetscSF sfPoint; 1461 ISLocalToGlobalMapping ltogMigration; 1462 ISLocalToGlobalMapping ltogOriginal = NULL; 1463 PetscBool flg; 1464 PetscErrorCode ierr; 1465 1466 PetscFunctionBegin; 1467 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1468 ierr = PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr); 1469 ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr); 1470 ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr); 1471 ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr); 1472 1473 /* Check for a one-to-all distribution pattern */ 1474 ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr); 1475 ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr); 1476 if (nroots >= 0) { 1477 IS isOriginal; 1478 PetscInt n, size, nleaves; 1479 PetscInt *numbering_orig, *numbering_new; 1480 /* Get the original point numbering */ 1481 ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr); 1482 ierr = ISLocalToGlobalMappingCreateIS(isOriginal, <ogOriginal);CHKERRQ(ierr); 1483 ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr); 1484 ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr); 1485 /* Convert to positive global numbers */ 1486 for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);} 1487 /* Derive the new local-to-global mapping from the old one */ 1488 ierr = PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr); 1489 ierr = PetscMalloc1(nleaves, 
&numbering_new);CHKERRQ(ierr); 1490 ierr = PetscSFBcastBegin(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr); 1491 ierr = PetscSFBcastEnd(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr); 1492 ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, (const PetscInt*) numbering_new, PETSC_OWN_POINTER, <ogMigration);CHKERRQ(ierr); 1493 ierr = ISLocalToGlobalMappingRestoreIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr); 1494 ierr = ISDestroy(&isOriginal);CHKERRQ(ierr); 1495 } else { 1496 /* One-to-all distribution pattern: We can derive LToG from SF */ 1497 ierr = ISLocalToGlobalMappingCreateSF(sf, 0, <ogMigration);CHKERRQ(ierr); 1498 } 1499 ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr); 1500 if (flg) { 1501 ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr); 1502 ierr = ISLocalToGlobalMappingView(ltogMigration, NULL);CHKERRQ(ierr); 1503 } 1504 /* Migrate DM data to target DM */ 1505 ierr = DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr); 1506 ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr); 1507 ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr); 1508 ierr = DMPlexDistributeSetupHybrid(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr); 1509 ierr = DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr); 1510 ierr = ISLocalToGlobalMappingDestroy(<ogOriginal);CHKERRQ(ierr); 1511 ierr = ISLocalToGlobalMappingDestroy(<ogMigration);CHKERRQ(ierr); 1512 ierr = PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr); 1513 PetscFunctionReturn(0); 1514 } 1515 1516 #undef __FUNCT__ 1517 #define __FUNCT__ "DMPlexDistribute" 1518 /*@C 1519 DMPlexDistribute - Distributes the mesh and any associated sections. 
1520 1521 Not Collective 1522 1523 Input Parameter: 1524 + dm - The original DMPlex object 1525 - overlap - The overlap of partitions, 0 is the default 1526 1527 Output Parameter: 1528 + sf - The PetscSF used for point distribution 1529 - parallelMesh - The distributed DMPlex object, or NULL 1530 1531 Note: If the mesh was not distributed, the return value is NULL. 1532 1533 The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and 1534 DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function 1535 representation on the mesh. 1536 1537 Level: intermediate 1538 1539 .keywords: mesh, elements 1540 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure() 1541 @*/ 1542 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel) 1543 { 1544 MPI_Comm comm; 1545 PetscPartitioner partitioner; 1546 IS cellPart; 1547 PetscSection cellPartSection; 1548 DM dmCoord; 1549 DMLabel lblPartition, lblMigration; 1550 PetscSF sfProcess, sfMigration, sfStratified, sfPoint; 1551 PetscBool flg; 1552 PetscMPIInt rank, numProcs, p; 1553 PetscErrorCode ierr; 1554 1555 PetscFunctionBegin; 1556 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1557 if (sf) PetscValidPointer(sf,4); 1558 PetscValidPointer(dmParallel,5); 1559 1560 ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr); 1561 ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr); 1562 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 1563 ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr); 1564 1565 *dmParallel = NULL; 1566 if (numProcs == 1) PetscFunctionReturn(0); 1567 1568 /* Create cell partition */ 1569 ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr); 1570 ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr); 1571 ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr); 1572 ierr = 
PetscPartitionerPartition(partitioner, dm, cellPartSection, &cellPart);CHKERRQ(ierr); 1573 { 1574 /* Convert partition to DMLabel */ 1575 PetscInt proc, pStart, pEnd, npoints, poffset; 1576 const PetscInt *points; 1577 ierr = DMLabelCreate("Point Partition", &lblPartition);CHKERRQ(ierr); 1578 ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr); 1579 ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr); 1580 for (proc = pStart; proc < pEnd; proc++) { 1581 ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr); 1582 ierr = PetscSectionGetOffset(cellPartSection, proc, &poffset);CHKERRQ(ierr); 1583 for (p = poffset; p < poffset+npoints; p++) { 1584 ierr = DMLabelSetValue(lblPartition, points[p], proc);CHKERRQ(ierr); 1585 } 1586 } 1587 ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr); 1588 } 1589 ierr = DMPlexPartitionLabelClosure(dm, lblPartition);CHKERRQ(ierr); 1590 { 1591 /* Build a global process SF */ 1592 PetscSFNode *remoteProc; 1593 ierr = PetscMalloc1(numProcs, &remoteProc);CHKERRQ(ierr); 1594 for (p = 0; p < numProcs; ++p) { 1595 remoteProc[p].rank = p; 1596 remoteProc[p].index = rank; 1597 } 1598 ierr = PetscSFCreate(comm, &sfProcess);CHKERRQ(ierr); 1599 ierr = PetscObjectSetName((PetscObject) sfProcess, "Process SF");CHKERRQ(ierr); 1600 ierr = PetscSFSetGraph(sfProcess, numProcs, numProcs, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr); 1601 } 1602 ierr = DMLabelCreate("Point migration", &lblMigration);CHKERRQ(ierr); 1603 ierr = DMPlexPartitionLabelInvert(dm, lblPartition, sfProcess, lblMigration);CHKERRQ(ierr); 1604 ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr); 1605 /* Stratify the SF in case we are migrating an already parallel plex */ 1606 ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr); 1607 ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr); 1608 sfMigration = sfStratified; 1609 ierr = 
PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr); 1610 ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr); 1611 if (flg) { 1612 ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 1613 ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 1614 } 1615 1616 /* Create non-overlapping parallel DM and migrate internal data */ 1617 ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr); 1618 ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr); 1619 ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr); 1620 1621 /* Build the point SF without overlap */ 1622 ierr = DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr); 1623 ierr = DMSetPointSF(*dmParallel, sfPoint);CHKERRQ(ierr); 1624 ierr = DMGetCoordinateDM(*dmParallel, &dmCoord);CHKERRQ(ierr); 1625 if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);} 1626 if (flg) {ierr = PetscSFView(sfPoint, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);} 1627 1628 if (overlap > 0) { 1629 DM dmOverlap; 1630 PetscInt nroots, nleaves; 1631 PetscSFNode *newRemote; 1632 const PetscSFNode *oldRemote; 1633 PetscSF sfOverlap, sfOverlapPoint; 1634 /* Add the partition overlap to the distributed DM */ 1635 ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr); 1636 ierr = DMDestroy(dmParallel);CHKERRQ(ierr); 1637 *dmParallel = dmOverlap; 1638 if (flg) { 1639 ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr); 1640 ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr); 1641 } 1642 1643 /* Re-map the migration SF to establish the full migration pattern */ 1644 ierr = PetscSFGetGraph(sfMigration, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr); 1645 ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr); 1646 ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr); 1647 
ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr); 1648 ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr); 1649 ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr); 1650 ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr); 1651 ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr); 1652 ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr); 1653 sfMigration = sfOverlapPoint; 1654 } 1655 /* Cleanup Partition */ 1656 ierr = PetscSFDestroy(&sfProcess);CHKERRQ(ierr); 1657 ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr); 1658 ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr); 1659 ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr); 1660 ierr = ISDestroy(&cellPart);CHKERRQ(ierr); 1661 /* Copy BC */ 1662 ierr = DMCopyBoundary(dm, *dmParallel);CHKERRQ(ierr); 1663 /* Create sfNatural */ 1664 if (dm->useNatural) { 1665 PetscSection section; 1666 1667 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1668 ierr = DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural);CHKERRQ(ierr); 1669 } 1670 /* Cleanup */ 1671 if (sf) {*sf = sfMigration;} 1672 else {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);} 1673 ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr); 1674 ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr); 1675 PetscFunctionReturn(0); 1676 } 1677 1678 #undef __FUNCT__ 1679 #define __FUNCT__ "DMPlexDistributeOverlap" 1680 /*@C 1681 DMPlexDistribute - Add partition overlap to a distributed non-overlapping DM. 
1682 1683 Not Collective 1684 1685 Input Parameter: 1686 + dm - The non-overlapping distrbuted DMPlex object 1687 - overlap - The overlap of partitions, 0 is the default 1688 1689 Output Parameter: 1690 + sf - The PetscSF used for point distribution 1691 - dmOverlap - The overlapping distributed DMPlex object, or NULL 1692 1693 Note: If the mesh was not distributed, the return value is NULL. 1694 1695 The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and 1696 DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function 1697 representation on the mesh. 1698 1699 Level: intermediate 1700 1701 .keywords: mesh, elements 1702 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure() 1703 @*/ 1704 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap) 1705 { 1706 MPI_Comm comm; 1707 PetscMPIInt rank; 1708 PetscSection rootSection, leafSection; 1709 IS rootrank, leafrank; 1710 DM dmCoord; 1711 DMLabel lblOverlap; 1712 PetscSF sfOverlap, sfStratified, sfPoint; 1713 PetscErrorCode ierr; 1714 1715 PetscFunctionBegin; 1716 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1717 if (sf) PetscValidPointer(sf, 3); 1718 PetscValidPointer(dmOverlap, 4); 1719 1720 ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr); 1721 ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr); 1722 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 1723 1724 /* Compute point overlap with neighbouring processes on the distributed DM */ 1725 ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr); 1726 ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr); 1727 ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr); 1728 ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr); 1729 ierr = DMPlexCreateOverlap(dm, overlap, 
rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr); 1730 /* Convert overlap label to stratified migration SF */ 1731 ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr); 1732 ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr); 1733 ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr); 1734 sfOverlap = sfStratified; 1735 ierr = PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr); 1736 ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr); 1737 1738 ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr); 1739 ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr); 1740 ierr = ISDestroy(&rootrank);CHKERRQ(ierr); 1741 ierr = ISDestroy(&leafrank);CHKERRQ(ierr); 1742 ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr); 1743 1744 /* Build the overlapping DM */ 1745 ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr); 1746 ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr); 1747 ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr); 1748 /* Build the new point SF */ 1749 ierr = DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);CHKERRQ(ierr); 1750 ierr = DMSetPointSF(*dmOverlap, sfPoint);CHKERRQ(ierr); 1751 ierr = DMGetCoordinateDM(*dmOverlap, &dmCoord);CHKERRQ(ierr); 1752 if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);} 1753 ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr); 1754 /* Cleanup overlap partition */ 1755 ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr); 1756 if (sf) *sf = sfOverlap; 1757 else {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);} 1758 ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr); 1759 PetscFunctionReturn(0); 1760 } 1761 1762 #undef __FUNCT__ 1763 #define __FUNCT__ "DMPlexGetGatherDM" 1764 /*@C 1765 DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the 1766 root process of the original's communicator. 
1767 1768 Input Parameters: 1769 . dm - the original DMPlex object 1770 1771 Output Parameters: 1772 . gatherMesh - the gathered DM object, or NULL 1773 1774 Level: intermediate 1775 1776 .keywords: mesh 1777 .seealso: DMPlexDistribute(), DMPlexGetRedundantDM() 1778 @*/ 1779 PetscErrorCode DMPlexGetGatherDM(DM dm, DM * gatherMesh) 1780 { 1781 MPI_Comm comm; 1782 PetscMPIInt size; 1783 PetscPartitioner oldPart, gatherPart; 1784 PetscErrorCode ierr; 1785 1786 PetscFunctionBegin; 1787 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1788 comm = PetscObjectComm((PetscObject)dm); 1789 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 1790 *gatherMesh = NULL; 1791 if (size == 1) PetscFunctionReturn(0); 1792 ierr = DMPlexGetPartitioner(dm,&oldPart);CHKERRQ(ierr); 1793 ierr = PetscObjectReference((PetscObject)oldPart);CHKERRQ(ierr); 1794 ierr = PetscPartitionerCreate(comm,&gatherPart);CHKERRQ(ierr); 1795 ierr = PetscPartitionerSetType(gatherPart,PETSCPARTITIONERGATHER);CHKERRQ(ierr); 1796 ierr = DMPlexSetPartitioner(dm,gatherPart);CHKERRQ(ierr); 1797 ierr = DMPlexDistribute(dm,0,NULL,gatherMesh);CHKERRQ(ierr); 1798 ierr = DMPlexSetPartitioner(dm,oldPart);CHKERRQ(ierr); 1799 ierr = PetscPartitionerDestroy(&gatherPart);CHKERRQ(ierr); 1800 ierr = PetscPartitionerDestroy(&oldPart);CHKERRQ(ierr); 1801 PetscFunctionReturn(0); 1802 } 1803 1804 #undef __FUNCT__ 1805 #define __FUNCT__ "DMPlexGetRedundantDM" 1806 /*@C 1807 DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process. 1808 1809 Input Parameters: 1810 . dm - the original DMPlex object 1811 1812 Output Parameters: 1813 . 
redundantMesh - the redundant DM object, or NULL 1814 1815 Level: intermediate 1816 1817 .keywords: mesh 1818 .seealso: DMPlexDistribute(), DMPlexGetGatherDM() 1819 @*/ 1820 PetscErrorCode DMPlexGetRedundantDM(DM dm, DM * redundantMesh) 1821 { 1822 MPI_Comm comm; 1823 PetscMPIInt size, rank; 1824 PetscInt pStart, pEnd, p; 1825 PetscInt numPoints = -1; 1826 PetscSF migrationSF, sfPoint; 1827 DM gatherDM, dmCoord; 1828 PetscSFNode *points; 1829 PetscErrorCode ierr; 1830 1831 PetscFunctionBegin; 1832 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1833 comm = PetscObjectComm((PetscObject)dm); 1834 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 1835 *redundantMesh = NULL; 1836 if (size == 1) PetscFunctionReturn(0); 1837 ierr = DMPlexGetGatherDM(dm,&gatherDM);CHKERRQ(ierr); 1838 if (!gatherDM) PetscFunctionReturn(0); 1839 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 1840 ierr = DMPlexGetChart(gatherDM,&pStart,&pEnd);CHKERRQ(ierr); 1841 numPoints = pEnd - pStart; 1842 ierr = MPI_Bcast(&numPoints,1,MPIU_INT,0,comm);CHKERRQ(ierr); 1843 ierr = PetscMalloc1(numPoints,&points);CHKERRQ(ierr); 1844 ierr = PetscSFCreate(comm,&migrationSF);CHKERRQ(ierr); 1845 for (p = 0; p < numPoints; p++) { 1846 points[p].index = p; 1847 points[p].rank = 0; 1848 } 1849 ierr = PetscSFSetGraph(migrationSF,pEnd-pStart,numPoints,NULL,PETSC_OWN_POINTER,points,PETSC_OWN_POINTER);CHKERRQ(ierr); 1850 ierr = DMPlexCreate(comm, redundantMesh);CHKERRQ(ierr); 1851 ierr = PetscObjectSetName((PetscObject) *redundantMesh, "Redundant Mesh");CHKERRQ(ierr); 1852 ierr = DMPlexMigrate(gatherDM, migrationSF, *redundantMesh);CHKERRQ(ierr); 1853 ierr = DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint);CHKERRQ(ierr); 1854 ierr = DMSetPointSF(*redundantMesh, sfPoint);CHKERRQ(ierr); 1855 ierr = DMGetCoordinateDM(*redundantMesh, &dmCoord);CHKERRQ(ierr); 1856 if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);} 1857 ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr); 1858 ierr = 
PetscSFDestroy(&migrationSF);CHKERRQ(ierr); 1859 ierr = DMDestroy(&gatherDM);CHKERRQ(ierr); 1860 PetscFunctionReturn(0); 1861 } 1862