#include <petsc-private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
#include <petscsf.h>

#undef __FUNCT__
#define __FUNCT__ "DMPlexSetAdjacencyUseCone"
/*@
  DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first

  Input Parameters:
+ dm      - The DM object
- useCone - Flag to use the cone first

  Level: intermediate

  Notes:
$ FEM:   Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE
$ FVM:   Two points p and q are adjacent if q \in star(cone(p)),    useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
$ FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE,  useClosure = PETSC_TRUE

.seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  mesh->useCone = useCone;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexGetAdjacencyUseCone"
/*@
  DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first

  Input Parameter:
. dm - The DM object

  Output Parameter:
. useCone - Flag to use the cone first

  Level: intermediate

  Notes:
$ FEM:   Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE
$ FVM:   Two points p and q are adjacent if q \in star(cone(p)),    useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
$ FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE,  useClosure = PETSC_TRUE

.seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidIntPointer(useCone, 2);
  *useCone = mesh->useCone;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexSetAdjacencyUseClosure"
/*@
  DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure

  Input Parameters:
+ dm         - The DM object
- useClosure - Flag to use the closure

  Level: intermediate

  Notes:
$ FEM:   Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE
$ FVM:   Two points p and q are adjacent if q \in star(cone(p)),    useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
$ FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE,  useClosure = PETSC_TRUE

.seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  mesh->useClosure = useClosure;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexGetAdjacencyUseClosure"
/*@
  DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure

  Input Parameter:
. dm - The DM object

  Output Parameter:
. useClosure - Flag to use the closure

  Level: intermediate

  Notes:
$ FEM:   Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE
$ FVM:   Two points p and q are adjacent if q \in star(cone(p)),    useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
$ FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE,  useClosure = PETSC_TRUE

.seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidIntPointer(useClosure, 2);
  *useClosure = mesh->useClosure;
  PetscFunctionReturn(0);
}
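/*
  Usage sketch (illustrative only, not part of this file): choosing the
  adjacency pattern described in the Notes above before distribution or
  matrix preallocation.  The DM dm is assumed to have been created elsewhere,
  e.g. with DMPlexCreateBoxMesh().

    FEM, q \in closure(star(p)):
      ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_FALSE);CHKERRQ(ierr);
      ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_TRUE);CHKERRQ(ierr);

    FVM, q \in star(cone(p)):
      ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
      ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
*/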
#undef __FUNCT__
#define __FUNCT__ "DMPlexGetAdjacency_Cone_Internal"
static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
{
  const PetscInt *cone = NULL;
  PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
  ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
  for (c = 0; c < coneSize; ++c) {
    const PetscInt *support = NULL;
    PetscInt        supportSize, s, q;

    ierr = DMPlexGetSupportSize(dm, cone[c], &supportSize);CHKERRQ(ierr);
    ierr = DMPlexGetSupport(dm, cone[c], &support);CHKERRQ(ierr);
    for (s = 0; s < supportSize; ++s) {
      /* Linear search for support[s] in adj[]; if it is absent, the loop guard
         appends it (the comma expression evaluates to 0, terminating the loop) */
      for (q = 0; q < numAdj || (adj[numAdj++] = support[s],0); ++q) {
        if (support[s] == adj[q]) break;
      }
      if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh: exceeded adjacency allocation (%D)", maxAdjSize);
    }
  }
  *adjSize = numAdj;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexGetAdjacency_Support_Internal"
static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
{
  const PetscInt *support = NULL;
  PetscInt        numAdj = 0, maxAdjSize = *adjSize, supportSize, s;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
  ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
  for (s = 0; s < supportSize; ++s) {
    const PetscInt *cone = NULL;
    PetscInt        coneSize, c, q;

    ierr = DMPlexGetConeSize(dm, support[s], &coneSize);CHKERRQ(ierr);
    ierr = DMPlexGetCone(dm, support[s], &cone);CHKERRQ(ierr);
    for (c = 0; c < coneSize; ++c) {
      /* Same duplicate-free append idiom as in DMPlexGetAdjacency_Cone_Internal() */
      for (q = 0; q < numAdj || (adj[numAdj++] = cone[c],0); ++q) {
        if (cone[c] == adj[q]) break;
      }
      if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh: exceeded adjacency allocation (%D)", maxAdjSize);
    }
  }
  *adjSize = numAdj;
  PetscFunctionReturn(0);
}
#undef __FUNCT__
#define __FUNCT__ "DMPlexGetAdjacency_Transitive_Internal"
static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
{
  PetscInt       *star = NULL;
  PetscInt        numAdj = 0, maxAdjSize = *adjSize, starSize, s;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
  /* The closure arrays interleave (point, orientation) pairs, hence the stride-2 loops */
  for (s = 0; s < starSize*2; s += 2) {
    const PetscInt *closure = NULL;
    PetscInt        closureSize, c, q;

    ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
    for (c = 0; c < closureSize*2; c += 2) {
      for (q = 0; q < numAdj || (adj[numAdj++] = closure[c],0); ++q) {
        if (closure[c] == adj[q]) break;
      }
      if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh: exceeded adjacency allocation (%D)", maxAdjSize);
    }
    ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
  }
  ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
  *adjSize = numAdj;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexGetAdjacency_Internal"
PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscInt *adjSize, PetscInt *adj[])
{
  static PetscInt asiz = 0;  /* upper bound on the adjacency size, cached from the last allocating call */
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  if (!*adj) {
    PetscInt depth, maxConeSize, maxSupportSize;

    ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
    ierr = DMPlexGetMaxSizes(dm, &maxConeSize, &maxSupportSize);CHKERRQ(ierr);
    asiz = PetscPowInt(maxConeSize, depth+1) * PetscPowInt(maxSupportSize, depth+1) + 1;
    ierr = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
  }
  if (*adjSize < 0) *adjSize = asiz;
  if (useTransitiveClosure) {
    ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
  } else if (useCone) {
    ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  } else {
    ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexGetAdjacency"
/*@
  DMPlexGetAdjacency - Return all points adjacent to the given point

  Input Parameters:
+ dm      - The DM object
. p       - The point
. adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
- adj     - Either NULL so that the array is allocated, or an existing array with size adjSize

  Output Parameters:
+ adjSize - The number of adjacent points
- adj     - The adjacent points

  Level: advanced

  Notes: The user must PetscFree the adj array if it was not passed in.

.seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
{
  DM_Plex       *mesh = (DM_Plex *) dm->data;
  PetscErrorCode ierr;

  PetscFunctionBeginHot;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(adjSize,3);
  PetscValidPointer(adj,4);
  ierr = DMPlexGetAdjacency_Internal(dm, p, mesh->useCone, mesh->useClosure, adjSize, adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
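/*
  Usage sketch (illustrative, not part of this file): letting
  DMPlexGetAdjacency() allocate the adjacency array.  Passing adj == NULL and
  adjSize == PETSC_DETERMINE asks the routine to allocate; the caller then
  owns the array.  dm and p are assumed to exist.

    PetscInt  adjSize = PETSC_DETERMINE;
    PetscInt *adj     = NULL;
    PetscInt  a;

    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      ierr = PetscPrintf(PETSC_COMM_SELF, "%D is adjacent to %D\n", p, adj[a]);CHKERRQ(ierr);
    }
    ierr = PetscFree(adj);CHKERRQ(ierr);
*/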
#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeField"
/*@
  DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

  Collective on DM

  Input Parameters:
+ dm              - The DMPlex object
. pointSF         - The PetscSF describing the communication pattern
. originalSection - The PetscSection for the existing data layout
- originalVec     - The existing data

  Output Parameters:
+ newSection - The PetscSection describing the new data layout
- newVec     - The new data

  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeData()
@*/
PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
{
  PetscSF        fieldSF;
  PetscInt      *remoteOffsets, fieldSize;
  PetscScalar   *originalValues, *newValues;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);

  ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
  ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);

  ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
  ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
  ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
  ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
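/*
  Usage sketch (illustrative, not part of this file), mirroring the coordinate
  distribution inside DMPlexDistribute(): the caller creates newSection and
  newVec, and DMPlexDistributeField() fills in their layout, type, and values.
  pointSF, originalSection, and originalVec are assumed to exist.

    PetscSection newSection;
    Vec          newVec;

    ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &newSection);CHKERRQ(ierr);
    ierr = VecCreate(PetscObjectComm((PetscObject) dm), &newVec);CHKERRQ(ierr);
    ierr = DMPlexDistributeField(dm, pointSF, originalSection, originalVec, newSection, newVec);CHKERRQ(ierr);
*/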
#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeData"
/*@
  DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

  Collective on DM

  Input Parameters:
+ dm              - The DMPlex object
. pointSF         - The PetscSF describing the communication pattern
. originalSection - The PetscSection for the existing data layout
. datatype        - The type of data
- originalData    - The existing data

  Output Parameters:
+ newSection - The PetscSection describing the new data layout
- newData    - The new data

  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeField()
@*/
PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
{
  PetscSF        fieldSF;
  PetscInt      *remoteOffsets, fieldSize;
  PetscMPIInt    dataSize;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);

  ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
  ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
  ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);

  ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
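/*
  Usage sketch (illustrative, not part of this file): distributing an
  integer-valued array attached to the mesh points.  The routine allocates
  *newData with PetscMalloc(), so the caller must eventually PetscFree() it.
  pointSF, originalSection, and originalData are assumed to exist.

    PetscSection newSection;
    PetscInt    *newData = NULL;

    ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &newSection);CHKERRQ(ierr);
    ierr = DMPlexDistributeData(dm, pointSF, originalSection, MPIU_INT, originalData, newSection, (void **) &newData);CHKERRQ(ierr);
    ierr = PetscFree(newData);CHKERRQ(ierr);
*/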
#undef __FUNCT__
#define __FUNCT__ "DMPlexDistribute"
/*@C
  DMPlexDistribute - Distributes the mesh and any associated sections.

  Collective on DM

  Input Parameters:
+ dm          - The original DMPlex object
. partitioner - The partitioning package, or NULL for the default
- overlap     - The overlap of partitions, 0 is the default

  Output Parameters:
+ sf         - The PetscSF used for point distribution
- dmParallel - The distributed DMPlex object, or NULL

  Note: If the mesh was not distributed (for example, on a single process), dmParallel is set to NULL.

  The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
  DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: intermediate

.keywords: mesh, elements
.seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
@*/
PetscErrorCode DMPlexDistribute(DM dm, const char partitioner[], PetscInt overlap, PetscSF *sf, DM *dmParallel)
{
  DM_Plex               *mesh = (DM_Plex*) dm->data, *pmesh;
  MPI_Comm               comm;
  const PetscInt         height = 0;
  PetscInt               dim, numRemoteRanks;
  IS                     origCellPart, origPart, cellPart, part;
  PetscSection           origCellPartSection, origPartSection, cellPartSection, partSection;
  PetscSFNode           *remoteRanks;
  PetscSF                partSF, pointSF, coneSF;
  ISLocalToGlobalMapping renumbering;
  PetscSection           originalConeSection, newConeSection;
  PetscInt              *remoteOffsets;
  PetscInt              *cones, *newCones, newConesSize;
  PetscBool              flg;
  PetscMPIInt            rank, numProcs, p;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (sf) PetscValidPointer(sf,4);
  PetscValidPointer(dmParallel,5);

  ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);

  *dmParallel = NULL;
  if (numProcs == 1) { /* Nothing to distribute; balance the log event before returning */
    ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  ierr = DMPlexGetDimension(dm, &dim);CHKERRQ(ierr);
  /* Create cell partition - We need to rewrite to use IS, use the MatPartition stuff */
  ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  if (overlap > 1) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Overlap > 1 not yet implemented");
  ierr = DMPlexCreatePartition(dm, partitioner, height, overlap > 0 ? PETSC_TRUE : PETSC_FALSE, &cellPartSection, &cellPart, &origCellPartSection, &origCellPart);CHKERRQ(ierr);
  /* Create SF assuming a serial partition for all processes: Could check for IS length here */
  if (!rank) numRemoteRanks = numProcs;
  else       numRemoteRanks = 0;
  ierr = PetscMalloc1(numRemoteRanks, &remoteRanks);CHKERRQ(ierr);
  for (p = 0; p < numRemoteRanks; ++p) {
    remoteRanks[p].rank  = p;
    remoteRanks[p].index = 0;
  }
  ierr = PetscSFCreate(comm, &partSF);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(partSF, 1, numRemoteRanks, NULL, PETSC_OWN_POINTER, remoteRanks, PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Cell Partition:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(cellPartSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = ISView(cellPart, NULL);CHKERRQ(ierr);
    if (origCellPart) {
      ierr = PetscPrintf(comm, "Original Cell Partition:\n");CHKERRQ(ierr);
      ierr = PetscSectionView(origCellPartSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
      ierr = ISView(origCellPart, NULL);CHKERRQ(ierr);
    }
    ierr = PetscSFView(partSF, NULL);CHKERRQ(ierr);
  }
  /* Close the partition over the mesh */
  ierr = DMPlexCreatePartitionClosure(dm, cellPartSection, cellPart, &partSection, &part);CHKERRQ(ierr);
  ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
  /* Create new mesh */
  ierr  = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
  ierr  = DMPlexSetDimension(*dmParallel, dim);CHKERRQ(ierr);
  ierr  = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
  pmesh = (DM_Plex*) (*dmParallel)->data;
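  /* Note on the step below (added description): PetscSFConvertPartition()
     takes the closed point partition (partSection, part) and produces
     (a) "renumbering", a local-to-global mapping giving each original point
     its number in the new distribution, and (b) "pointSF", an SF whose roots
     are the original (serial) points and whose leaves are the redistributed
     points; pointSF drives all subsequent data movement. */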
  /* Distribute sieve points and the global point numbering (replaces creating remote bases) */
  ierr = PetscSFConvertPartition(partSF, partSection, part, &renumbering, &pointSF);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Point Partition:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(partSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = ISView(part, NULL);CHKERRQ(ierr);
    ierr = PetscSFView(pointSF, NULL);CHKERRQ(ierr);
    ierr = PetscPrintf(comm, "Point Renumbering after partition:\n");CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingView(renumbering, NULL);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Distribute cone section */
  ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(*dmParallel, &newConeSection);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(pointSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
  ierr = DMSetUp(*dmParallel);CHKERRQ(ierr);
  {
    PetscInt pStart, pEnd, p;

    ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
    for (p = pStart; p < pEnd; ++p) {
      PetscInt coneSize;
      ierr = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
      pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
    }
  }
  /* Communicate and renumber cones */
  ierr = PetscSFCreateSectionSF(pointSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
  ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
  ierr = DMPlexGetCones(*dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
  }
  ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
  ierr = DMPlexGetConeOrientations(*dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Create supports and stratify sieve */
  {
    PetscInt pStart, pEnd;

    ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
  }
  ierr = DMPlexSymmetrize(*dmParallel);CHKERRQ(ierr);
  ierr = DMPlexStratify(*dmParallel);CHKERRQ(ierr);
  /* Distribute Coordinates */
  {
    PetscSection originalCoordSection, newCoordSection;
    Vec          originalCoordinates, newCoordinates;
    PetscInt     bs;
    const char  *name;

    ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
    ierr = DMGetCoordinateSection(*dmParallel, &newCoordSection);CHKERRQ(ierr);
    ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
    ierr = VecCreate(comm, &newCoordinates);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);

    ierr = DMPlexDistributeField(dm, pointSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
    ierr = DMSetCoordinatesLocal(*dmParallel, newCoordinates);CHKERRQ(ierr);
    ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
    ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
    ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
  }
  /* Distribute labels */
  ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
  {
    DMLabel  next = mesh->labels, newNext = pmesh->labels;
    PetscInt numLabels = 0, l;

    /* Bcast the number of labels */
    while (next) {++numLabels; next = next->next;}
    ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
    next = mesh->labels;
    for (l = 0; l < numLabels; ++l) {
      DMLabel   labelNew;
      PetscBool isdepth;

      /* Skip "depth" because it is recreated */
      if (!rank) {ierr = PetscStrcmp(next->name, "depth", &isdepth);CHKERRQ(ierr);}
      ierr = MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
      if (isdepth) {if (!rank) next = next->next; continue;}
      ierr = DMLabelDistribute(next, partSection, part, renumbering, &labelNew);CHKERRQ(ierr);
      /* Insert into the list */
      if (newNext) newNext->next = labelNew;
      else         pmesh->labels = labelNew;
      newNext = labelNew;
      if (!rank) next = next->next;
    }
  }
  ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
  /* Setup hybrid structure */
  {
    const PetscInt *gpoints;
    PetscInt        depth, n, d;

    for (d = 0; d <= dim; ++d) {pmesh->hybridPointMax[d] = mesh->hybridPointMax[d];}
    ierr = MPI_Bcast(pmesh->hybridPointMax, dim+1, MPIU_INT, 0, comm);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetSize(renumbering, &n);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetIndices(renumbering, &gpoints);CHKERRQ(ierr);
    ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
    for (d = 0; d <= dim; ++d) {
      PetscInt pmax = pmesh->hybridPointMax[d], newmax = 0, pEnd, stratum[2], p;

      if (pmax < 0) continue;
      ierr = DMPlexGetDepthStratum(dm, d > depth ? depth : d, &stratum[0], &stratum[1]);CHKERRQ(ierr);
      ierr = DMPlexGetDepthStratum(*dmParallel, d, NULL, &pEnd);CHKERRQ(ierr);
      ierr = MPI_Bcast(stratum, 2, MPIU_INT, 0, comm);CHKERRQ(ierr);
      /* Count local points that were hybrid points in the serial mesh */
      for (p = 0; p < n; ++p) {
        const PetscInt point = gpoints[p];

        if ((point >= stratum[0]) && (point < stratum[1]) && (point >= pmax)) ++newmax;
      }
      if (newmax > 0) pmesh->hybridPointMax[d] = pEnd - newmax;
      else            pmesh->hybridPointMax[d] = -1;
    }
    ierr = ISLocalToGlobalMappingRestoreIndices(renumbering, &gpoints);CHKERRQ(ierr);
  }
  /* Cleanup Partition */
  ierr = ISLocalToGlobalMappingDestroy(&renumbering);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&partSF);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&partSection);CHKERRQ(ierr);
  ierr = ISDestroy(&part);CHKERRQ(ierr);
  /* Create point SF for parallel mesh */
  ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  {
    const PetscInt *leaves;
    PetscSFNode    *remotePoints, *rowners, *lowners;
    PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
    PetscInt        pStart, pEnd;

    ierr = DMPlexGetChart(*dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSFGetGraph(pointSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
    ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
    for (p = 0; p < numRoots; ++p) {
      rowners[p].rank  = -1;
      rowners[p].index = -1;
    }
    if (origCellPart) {
      /* Make sure points in the original partition are not assigned to other procs */
      const PetscInt *origPoints;

      ierr = DMPlexCreatePartitionClosure(dm, origCellPartSection, origCellPart, &origPartSection, &origPart);CHKERRQ(ierr);
      ierr = ISGetIndices(origPart, &origPoints);CHKERRQ(ierr);
      for (p = 0; p < numProcs; ++p) {
        PetscInt dof, off, d;

        ierr = PetscSectionGetDof(origPartSection, p, &dof);CHKERRQ(ierr);
        ierr = PetscSectionGetOffset(origPartSection, p, &off);CHKERRQ(ierr);
        for (d = off; d < off+dof; ++d) {
          rowners[origPoints[d]].rank = p;
        }
      }
      ierr = ISRestoreIndices(origPart, &origPoints);CHKERRQ(ierr);
      ierr = ISDestroy(&origPart);CHKERRQ(ierr);
      ierr = PetscSectionDestroy(&origPartSection);CHKERRQ(ierr);
    }
    ierr = ISDestroy(&origCellPart);CHKERRQ(ierr);
    ierr = PetscSectionDestroy(&origCellPartSection);CHKERRQ(ierr);

    ierr = PetscSFBcastBegin(pointSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(pointSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
        lowners[p].rank  = rank;
        lowners[p].index = leaves ? leaves[p] : p;
      } else if (lowners[p].rank >= 0) { /* Point already claimed, so flag it so that MAXLOC does not listen to us */
        lowners[p].rank  = -2;
        lowners[p].index = -2;
      }
    }
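    /* Added description of the step below: ownership is resolved with an
       MPI_MAXLOC reduction over (rank, index) pairs. Unclaimed leaves bid
       with their own (rank, point), while already-claimed leaves and all
       roots are flagged with negative values so they lose the reduction;
       the winning pair is then broadcast back to every leaf. */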
    for (p = 0; p < numRoots; ++p) { /* Roots must not participate in the reduction; flag them so that MAXLOC ignores them */
      rowners[p].rank  = -3;
      rowners[p].index = -3;
    }
    ierr = PetscSFReduceBegin(pointSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(pointSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(pointSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(pointSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
      if (lowners[p].rank != rank) ++numGhostPoints;
    }
    ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
    ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
    for (p = 0, gp = 0; p < numLeaves; ++p) {
      if (lowners[p].rank != rank) {
        ghostPoints[gp]        = leaves ? leaves[p] : p;
        remotePoints[gp].rank  = lowners[p].rank;
        remotePoints[gp].index = lowners[p].index;
        ++gp;
      }
    }
    ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
    ierr = PetscSFSetGraph((*dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
    ierr = PetscSFSetFromOptions((*dmParallel)->sf);CHKERRQ(ierr);
  }
  pmesh->useCone    = mesh->useCone;
  pmesh->useClosure = mesh->useClosure;
  ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  /* Copy BC */
  ierr = DMPlexCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
  /* Cleanup */
  if (sf) {*sf = pointSF;}
  else    {ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);}
  ierr = DMSetFromOptions(*dmParallel);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
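/*
  Usage sketch (illustrative, not part of this file): distributing a serial
  mesh with the default partitioner and no overlap, then replacing the serial
  DM with the parallel one.  On a single process dmDist stays NULL and the
  serial dm is kept.

    DM      dmDist = NULL;
    PetscSF sf     = NULL;

    ierr = DMPlexDistribute(dm, NULL, 0, &sf, &dmDist);CHKERRQ(ierr);
    if (dmDist) {
      ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
      ierr = DMDestroy(&dm);CHKERRQ(ierr);
      dm   = dmDist;
    }
*/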