1 #include <petsc-private/dmpleximpl.h> /*I "petscdmplex.h" I*/ 2 #include <petscsf.h> 3 4 #undef __FUNCT__ 5 #define __FUNCT__ "DMPlexSetAdjacencyUseCone" 6 /*@ 7 DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first 8 9 Input Parameters: 10 + dm - The DM object 11 - useCone - Flag to use the cone first 12 13 Level: intermediate 14 15 Notes: 16 $ FEM: Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE 17 $ FVM: Two points p and q are adjacent if q \in star(cone(p)), useCone = PETSC_TRUE, useClosure = PETSC_FALSE 18 $ FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE, useClosure = PETSC_TRUE 19 20 .seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator() 21 @*/ 22 PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone) 23 { 24 DM_Plex *mesh = (DM_Plex *) dm->data; 25 26 PetscFunctionBegin; 27 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 28 mesh->useCone = useCone; 29 PetscFunctionReturn(0); 30 } 31 32 #undef __FUNCT__ 33 #define __FUNCT__ "DMPlexGetAdjacencyUseCone" 34 /*@ 35 DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first 36 37 Input Parameter: 38 . dm - The DM object 39 40 Output Parameter: 41 . 
useCone - Flag to use the cone first 42 43 Level: intermediate 44 45 Notes: 46 $ FEM: Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE 47 $ FVM: Two points p and q are adjacent if q \in star(cone(p)), useCone = PETSC_TRUE, useClosure = PETSC_FALSE 48 $ FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE, useClosure = PETSC_TRUE 49 50 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator() 51 @*/ 52 PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone) 53 { 54 DM_Plex *mesh = (DM_Plex *) dm->data; 55 56 PetscFunctionBegin; 57 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 58 PetscValidIntPointer(useCone, 2); 59 *useCone = mesh->useCone; 60 PetscFunctionReturn(0); 61 } 62 63 #undef __FUNCT__ 64 #define __FUNCT__ "DMPlexSetAdjacencyUseClosure" 65 /*@ 66 DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure 67 68 Input Parameters: 69 + dm - The DM object 70 - useClosure - Flag to use the closure 71 72 Level: intermediate 73 74 Notes: 75 $ FEM: Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE 76 $ FVM: Two points p and q are adjacent if q \in star(cone(p)), useCone = PETSC_TRUE, useClosure = PETSC_FALSE 77 $ FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE, useClosure = PETSC_TRUE 78 79 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator() 80 @*/ 81 PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure) 82 { 83 DM_Plex *mesh = (DM_Plex *) dm->data; 84 85 PetscFunctionBegin; 86 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 87 mesh->useClosure = useClosure; 88 PetscFunctionReturn(0); 89 } 90 91 #undef __FUNCT__ 92 #define __FUNCT__ 
"DMPlexGetAdjacencyUseClosure" 93 /*@ 94 DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure 95 96 Input Parameter: 97 . dm - The DM object 98 99 Output Parameter: 100 . useClosure - Flag to use the closure 101 102 Level: intermediate 103 104 Notes: 105 $ FEM: Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE 106 $ FVM: Two points p and q are adjacent if q \in star(cone(p)), useCone = PETSC_TRUE, useClosure = PETSC_FALSE 107 $ FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE, useClosure = PETSC_TRUE 108 109 .seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator() 110 @*/ 111 PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure) 112 { 113 DM_Plex *mesh = (DM_Plex *) dm->data; 114 115 PetscFunctionBegin; 116 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 117 PetscValidIntPointer(useClosure, 2); 118 *useClosure = mesh->useClosure; 119 PetscFunctionReturn(0); 120 } 121 122 #undef __FUNCT__ 123 #define __FUNCT__ "DMPlexGetAdjacency_Cone_Internal" 124 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[]) 125 { 126 const PetscInt *cone = NULL; 127 PetscInt numAdj = 0, maxAdjSize = *adjSize, coneSize, c; 128 PetscErrorCode ierr; 129 130 PetscFunctionBeginHot; 131 ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr); 132 ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr); 133 for (c = 0; c < coneSize; ++c) { 134 const PetscInt *support = NULL; 135 PetscInt supportSize, s, q; 136 137 ierr = DMPlexGetSupportSize(dm, cone[c], &supportSize);CHKERRQ(ierr); 138 ierr = DMPlexGetSupport(dm, cone[c], &support);CHKERRQ(ierr); 139 for (s = 0; s < supportSize; ++s) { 140 for (q = 0; q < numAdj || (adj[numAdj++] = support[s],0); ++q) { 141 if (support[s] == adj[q]) break; 142 } 143 if 
(numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize); 144 } 145 } 146 *adjSize = numAdj; 147 PetscFunctionReturn(0); 148 } 149 150 #undef __FUNCT__ 151 #define __FUNCT__ "DMPlexGetAdjacency_Support_Internal" 152 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[]) 153 { 154 const PetscInt *support = NULL; 155 PetscInt numAdj = 0, maxAdjSize = *adjSize, supportSize, s; 156 PetscErrorCode ierr; 157 158 PetscFunctionBeginHot; 159 ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr); 160 ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr); 161 for (s = 0; s < supportSize; ++s) { 162 const PetscInt *cone = NULL; 163 PetscInt coneSize, c, q; 164 165 ierr = DMPlexGetConeSize(dm, support[s], &coneSize);CHKERRQ(ierr); 166 ierr = DMPlexGetCone(dm, support[s], &cone);CHKERRQ(ierr); 167 for (c = 0; c < coneSize; ++c) { 168 for (q = 0; q < numAdj || (adj[numAdj++] = cone[c],0); ++q) { 169 if (cone[c] == adj[q]) break; 170 } 171 if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize); 172 } 173 } 174 *adjSize = numAdj; 175 PetscFunctionReturn(0); 176 } 177 178 #undef __FUNCT__ 179 #define __FUNCT__ "DMPlexGetAdjacency_Transitive_Internal" 180 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[]) 181 { 182 PetscInt *star = NULL; 183 PetscInt numAdj = 0, maxAdjSize = *adjSize, starSize, s; 184 PetscErrorCode ierr; 185 186 PetscFunctionBeginHot; 187 ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr); 188 for (s = 0; s < starSize*2; s += 2) { 189 const PetscInt *closure = NULL; 190 PetscInt closureSize, c, q; 191 192 ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr); 193 for (c = 
0; c < closureSize*2; c += 2) { 194 for (q = 0; q < numAdj || (adj[numAdj++] = closure[c],0); ++q) { 195 if (closure[c] == adj[q]) break; 196 } 197 if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize); 198 } 199 ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr); 200 } 201 ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr); 202 *adjSize = numAdj; 203 PetscFunctionReturn(0); 204 } 205 206 #undef __FUNCT__ 207 #define __FUNCT__ "DMPlexGetAdjacency_Internal" 208 PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscInt *adjSize, PetscInt adj[]) 209 { 210 PetscErrorCode ierr; 211 212 PetscFunctionBeginHot; 213 if (useTransitiveClosure) { 214 ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, adj);CHKERRQ(ierr); 215 } else if (useCone) { 216 ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, adj);CHKERRQ(ierr); 217 } else { 218 ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, adj);CHKERRQ(ierr); 219 } 220 PetscFunctionReturn(0); 221 } 222 223 #undef __FUNCT__ 224 #define __FUNCT__ "DMPlexGetAdjacency" 225 /*@ 226 DMPlexGetAdjacency - Return all points adjacent to the given point 227 228 Input Parameters: 229 + dm - The DM object 230 . p - The point 231 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE 232 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize 233 234 Output Parameters: 235 + adjSize - The number of adjacent points 236 - adj - The adjacent points 237 238 Level: advanced 239 240 Notes: The user must PetscFree the adj array if it was not passed in. 
241 242 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator() 243 @*/ 244 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[]) 245 { 246 DM_Plex *mesh = (DM_Plex *) dm->data; 247 static PetscInt asiz = 0; 248 PetscErrorCode ierr; 249 250 PetscFunctionBeginHot; 251 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 252 PetscValidPointer(adjSize,3); 253 PetscValidPointer(adj,4); 254 if (!*adj) { 255 PetscInt depth, maxConeSize, maxSupportSize; 256 257 ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr); 258 ierr = DMPlexGetMaxSizes(dm, &maxConeSize, &maxSupportSize);CHKERRQ(ierr); 259 asiz = PetscPowInt(maxConeSize, depth+1) * PetscPowInt(maxSupportSize, depth+1) + 1; 260 ierr = PetscMalloc1(asiz,adj);CHKERRQ(ierr); 261 } 262 if (*adjSize < 0) *adjSize = asiz; 263 ierr = DMPlexGetAdjacency_Internal(dm, p, mesh->useCone, mesh->useClosure, adjSize, *adj);CHKERRQ(ierr); 264 PetscFunctionReturn(0); 265 } 266 267 #undef __FUNCT__ 268 #define __FUNCT__ "DMPlexDistributeField" 269 /*@ 270 DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution 271 272 Collective on DM 273 274 Input Parameters: 275 + dm - The DMPlex object 276 . pointSF - The PetscSF describing the communication pattern 277 . 
- originalVec - The existing data

  Output Parameters:
+ newSection - The PetscSection describing the new data layout
- newVec - The new data

  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeData()
@*/
PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
{
  PetscSF        fieldSF;
  PetscInt      *remoteOffsets, fieldSize;
  PetscScalar   *originalValues, *newValues;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  /* Build the distributed layout in newSection and get the offsets of remote data */
  ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);

  /* Size newVec to hold the distributed data */
  ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
  ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);

  /* Broadcast the dof values from their owners into the new layout */
  ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
  ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
  ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
  ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeData"
/*@
  DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

  Collective on DM

  Input Parameters:
+ dm - The DMPlex object
. pointSF - The PetscSF describing the communication pattern
. originalSection - The PetscSection for existing data layout
. datatype - The type of data
- originalData - The existing data

  Output Parameters:
+ newSection - The PetscSection describing the new data layout
- newData - The new data

  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeField()
@*/
PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
{
  PetscSF        fieldSF;
  PetscInt      *remoteOffsets, fieldSize;
  PetscMPIInt    dataSize;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
  /* Build the distributed layout in newSection and get the offsets of remote data */
  ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);

  /* Allocate the receive buffer; ownership transfers to the caller, who must PetscFree it */
  ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
  ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
  ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);

  /* Broadcast the data from their owners into the new layout */
  ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexDistribute"
/*@C
  DMPlexDistribute - Distributes the mesh and any associated sections.

  Collective on DM

  Input Parameters:
+ dm - The original DMPlex object
. partitioner - The partitioning package, or NULL for the default
- overlap - The overlap of partitions, 0 is the default

  Output Parameters:
+ sf - The PetscSF used for point distribution
- dmParallel - The distributed DMPlex object, or NULL

  Note: If the mesh was not distributed, the return value is NULL

  Level: intermediate

.keywords: mesh, elements
.seealso: DMPlexCreate(), DMPlexDistributeByFace()
@*/
PetscErrorCode DMPlexDistribute(DM dm, const char partitioner[], PetscInt overlap, PetscSF *sf, DM *dmParallel)
{
  DM_Plex               *mesh = (DM_Plex*) dm->data, *pmesh;
  MPI_Comm               comm;
  const PetscInt         height = 0;
  PetscInt               dim, numRemoteRanks;
  IS                     origCellPart, origPart, cellPart, part;
  PetscSection           origCellPartSection, origPartSection, cellPartSection, partSection;
  PetscSFNode           *remoteRanks;
  PetscSF                partSF, pointSF, coneSF;
  ISLocalToGlobalMapping renumbering;
  PetscSection           originalConeSection, newConeSection;
  PetscInt              *remoteOffsets;
  PetscInt              *cones, *newCones, newConesSize;
  PetscBool              flg;
  PetscMPIInt            rank, numProcs, p;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (sf) PetscValidPointer(sf,4);
  PetscValidPointer(dmParallel,5);

  ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);

  /* Nothing to distribute on a single process */
  *dmParallel = NULL;
  if (numProcs == 1) PetscFunctionReturn(0);

  ierr = DMPlexGetDimension(dm, &dim);CHKERRQ(ierr);
  /* Create cell partition - We need to rewrite to use IS, use the MatPartition stuff */
  ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  if (overlap > 1) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Overlap > 1 not yet implemented");
  ierr = DMPlexCreatePartition(dm, partitioner, height, overlap > 0 ? PETSC_TRUE : PETSC_FALSE, &cellPartSection, &cellPart, &origCellPartSection, &origCellPart);CHKERRQ(ierr);
  /* Create SF assuming a serial partition for all processes: Could check for IS length here */
  if (!rank) numRemoteRanks = numProcs;
  else       numRemoteRanks = 0;
  ierr = PetscMalloc1(numRemoteRanks, &remoteRanks);CHKERRQ(ierr);
  for (p = 0; p < numRemoteRanks; ++p) {
    remoteRanks[p].rank  = p;
    remoteRanks[p].index = 0;
  }
  ierr = PetscSFCreate(comm, &partSF);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(partSF, 1, numRemoteRanks, NULL, PETSC_OWN_POINTER, remoteRanks, PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Cell Partition:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(cellPartSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = ISView(cellPart, NULL);CHKERRQ(ierr);
    if (origCellPart) {
      ierr = PetscPrintf(comm, "Original Cell Partition:\n");CHKERRQ(ierr);
      ierr = PetscSectionView(origCellPartSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
      ierr = ISView(origCellPart, NULL);CHKERRQ(ierr);
    }
    ierr = PetscSFView(partSF, NULL);CHKERRQ(ierr);
  }
  /* Close the partition over the mesh */
  ierr = DMPlexCreatePartitionClosure(dm, cellPartSection, cellPart, &partSection, &part);CHKERRQ(ierr);
  ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
  /* Create new mesh */
  ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
  ierr = DMPlexSetDimension(*dmParallel, dim);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
  pmesh = (DM_Plex*) (*dmParallel)->data;
  /* Distribute sieve points and the global point numbering (replaces creating remote bases) */
  ierr = PetscSFConvertPartition(partSF, partSection, part, &renumbering, &pointSF);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Point Partition:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(partSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = ISView(part, NULL);CHKERRQ(ierr);
    ierr = PetscSFView(pointSF, NULL);CHKERRQ(ierr);
    ierr = PetscPrintf(comm, "Point Renumbering after partition:\n");CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingView(renumbering, NULL);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Distribute cone section */
  ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(*dmParallel, &newConeSection);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(pointSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
  ierr = DMSetUp(*dmParallel);CHKERRQ(ierr);
  {
    /* Recompute the maximum cone size on the new mesh */
    PetscInt pStart, pEnd, p;

    ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
    for (p = pStart; p < pEnd; ++p) {
      PetscInt coneSize;
      ierr = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
      pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
    }
  }
  /* Communicate and renumber cones */
  ierr = PetscSFCreateSectionSF(pointSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
  ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
  ierr = DMPlexGetCones(*dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApply(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
  }
  /* Reuse coneSF to distribute the cone orientations */
  ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
  ierr = DMPlexGetConeOrientations(*dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Create supports and stratify sieve */
  {
    PetscInt pStart, pEnd;

    ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
  }
  ierr = DMPlexSymmetrize(*dmParallel);CHKERRQ(ierr);
  ierr = DMPlexStratify(*dmParallel);CHKERRQ(ierr);
  /* Distribute Coordinates */
  {
    PetscSection originalCoordSection, newCoordSection;
    Vec          originalCoordinates, newCoordinates;
    const char  *name;

    ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
    ierr = DMGetCoordinateSection(*dmParallel, &newCoordSection);CHKERRQ(ierr);
    ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
    ierr = VecCreate(comm, &newCoordinates);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);

    ierr = DMPlexDistributeField(dm, pointSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
    ierr = DMSetCoordinatesLocal(*dmParallel, newCoordinates);CHKERRQ(ierr);
    ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
  }
  /* Distribute labels: rank 0 drives the label list; other ranks follow the broadcast count/flags */
  ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
  {
    DMLabel  next = mesh->labels, newNext = pmesh->labels;
    PetscInt numLabels = 0, l;

    /* Bcast number of labels */
    while (next) {++numLabels; next = next->next;}
    ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
    next = mesh->labels;
    for (l = 0; l < numLabels; ++l) {
      DMLabel   labelNew;
      PetscBool isdepth;

      /* Skip "depth" because it is recreated */
      if (!rank) {ierr = PetscStrcmp(next->name, "depth", &isdepth);CHKERRQ(ierr);}
      ierr = MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
      if (isdepth) {if (!rank) next = next->next; continue;}
      ierr = DMLabelDistribute(next, partSection, part, renumbering, &labelNew);CHKERRQ(ierr);
      /* Insert into list */
      if (newNext) newNext->next = labelNew;
      else         pmesh->labels = labelNew;
      newNext = labelNew;
      if (!rank) next = next->next;
    }
  }
  ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
  /* Setup hybrid structure */
  {
    const PetscInt *gpoints;
    PetscInt        depth, n, d;

    for (d = 0; d <= dim; ++d) {pmesh->hybridPointMax[d] = mesh->hybridPointMax[d];}
    ierr = MPI_Bcast(pmesh->hybridPointMax, dim+1, MPIU_INT, 0, comm);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetSize(renumbering, &n);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetIndices(renumbering, &gpoints);CHKERRQ(ierr);
    ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
    for (d = 0; d <= dim; ++d) {
      /* Count the local points at or above the serial hybrid bound and shift the local bound accordingly */
      PetscInt pmax = pmesh->hybridPointMax[d], newmax = 0, pEnd, stratum[2], p;

      if (pmax < 0) continue;
      ierr = DMPlexGetDepthStratum(dm, d > depth ? depth : d, &stratum[0], &stratum[1]);CHKERRQ(ierr);
      ierr = DMPlexGetDepthStratum(*dmParallel, d, NULL, &pEnd);CHKERRQ(ierr);
      ierr = MPI_Bcast(stratum, 2, MPIU_INT, 0, comm);CHKERRQ(ierr);
      for (p = 0; p < n; ++p) {
        const PetscInt point = gpoints[p];

        if ((point >= stratum[0]) && (point < stratum[1]) && (point >= pmax)) ++newmax;
      }
      if (newmax > 0) pmesh->hybridPointMax[d] = pEnd - newmax;
      else            pmesh->hybridPointMax[d] = -1;
    }
    ierr = ISLocalToGlobalMappingRestoreIndices(renumbering, &gpoints);CHKERRQ(ierr);
  }
  /* Cleanup Partition */
  ierr = ISLocalToGlobalMappingDestroy(&renumbering);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&partSF);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&partSection);CHKERRQ(ierr);
  ierr = ISDestroy(&part);CHKERRQ(ierr);
  /* Create point SF for parallel mesh: ownership is resolved by a MAXLOC reduction over (rank, index) pairs */
  ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  {
    const PetscInt *leaves;
    PetscSFNode    *remotePoints, *rowners, *lowners;
    PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
    PetscInt        pStart, pEnd;

    ierr = DMPlexGetChart(*dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSFGetGraph(pointSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
    ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
    for (p=0; p<numRoots; p++) {
      rowners[p].rank  = -1;
      rowners[p].index = -1;
    }
    if (origCellPart) {
      /* Make sure points in the original partition are not assigned to other procs */
      const PetscInt *origPoints;

      ierr = DMPlexCreatePartitionClosure(dm, origCellPartSection, origCellPart, &origPartSection, &origPart);CHKERRQ(ierr);
      ierr = ISGetIndices(origPart, &origPoints);CHKERRQ(ierr);
      for (p = 0; p < numProcs; ++p) {
        PetscInt dof, off, d;

        ierr = PetscSectionGetDof(origPartSection, p, &dof);CHKERRQ(ierr);
        ierr = PetscSectionGetOffset(origPartSection, p, &off);CHKERRQ(ierr);
        for (d = off; d < off+dof; ++d) {
          rowners[origPoints[d]].rank = p;
        }
      }
      ierr = ISRestoreIndices(origPart, &origPoints);CHKERRQ(ierr);
      ierr = ISDestroy(&origPart);CHKERRQ(ierr);
      ierr = PetscSectionDestroy(&origPartSection);CHKERRQ(ierr);
    }
    ierr = ISDestroy(&origCellPart);CHKERRQ(ierr);
    ierr = PetscSectionDestroy(&origCellPartSection);CHKERRQ(ierr);

    ierr = PetscSFBcastBegin(pointSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(pointSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
        lowners[p].rank  = rank;
        lowners[p].index = leaves ? leaves[p] : p;
      } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
        lowners[p].rank  = -2;
        lowners[p].index = -2;
      }
    }
    for (p=0; p<numRoots; p++) { /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rowners[p].rank  = -3;
      rowners[p].index = -3;
    }
    ierr = PetscSFReduceBegin(pointSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(pointSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(pointSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(pointSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
      if (lowners[p].rank != rank) ++numGhostPoints;
    }
    /* Points owned elsewhere become ghost (leaf) points of the parallel mesh's SF */
    ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
    ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
    for (p = 0, gp = 0; p < numLeaves; ++p) {
      if (lowners[p].rank != rank) {
        ghostPoints[gp]        = leaves ? leaves[p] : p;
        remotePoints[gp].rank  = lowners[p].rank;
        remotePoints[gp].index = lowners[p].index;
        ++gp;
      }
    }
    ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
    ierr = PetscSFSetGraph((*dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
    ierr = PetscSFSetFromOptions((*dmParallel)->sf);CHKERRQ(ierr);
  }
  /* Propagate the adjacency flags to the parallel mesh */
  pmesh->useCone    = mesh->useCone;
  pmesh->useClosure = mesh->useClosure;
  ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  /* Cleanup */
  if (sf) {*sf = pointSF;}
  else    {ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);}
  ierr = DMSetFromOptions(*dmParallel);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}