1 #include <petsc/private/dmpleximpl.h> /*I "petscdmplex.h" I*/ 2 #include <petsc/private/hashseti.h> 3 4 PetscClassId PETSCPARTITIONER_CLASSID = 0; 5 6 PetscFunctionList PetscPartitionerList = NULL; 7 PetscBool PetscPartitionerRegisterAllCalled = PETSC_FALSE; 8 9 PetscBool ChacoPartitionercite = PETSC_FALSE; 10 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n" 11 " author = {Bruce Hendrickson and Robert Leland},\n" 12 " title = {A multilevel algorithm for partitioning graphs},\n" 13 " booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)}," 14 " isbn = {0-89791-816-9},\n" 15 " pages = {28},\n" 16 " doi = {http://doi.acm.org/10.1145/224170.224228},\n" 17 " publisher = {ACM Press},\n" 18 " address = {New York},\n" 19 " year = {1995}\n}\n"; 20 21 PetscBool ParMetisPartitionercite = PETSC_FALSE; 22 const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n" 23 " author = {George Karypis and Vipin Kumar},\n" 24 " title = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n" 25 " journal = {Journal of Parallel and Distributed Computing},\n" 26 " volume = {48},\n" 27 " pages = {71--85},\n" 28 " year = {1998}\n}\n"; 29 30 /*@C 31 DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner 32 33 Input Parameters: 34 + dm - The mesh DM dm 35 - height - Height of the strata from which to construct the graph 36 37 Output Parameter: 38 + numVertices - Number of vertices in the graph 39 . offsets - Point offsets in the graph 40 . adjacency - Point connectivity in the graph 41 - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency". Negative indicates that the cell is a duplicate from another process. 42 43 The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function 44 representation on the mesh. 
45 46 Level: developer 47 48 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMSetAdjacency() 49 @*/ 50 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering) 51 { 52 PetscInt dim, depth, p, pStart, pEnd, a, adjSize, idx, size; 53 PetscInt *adj = NULL, *vOffsets = NULL, *graph = NULL; 54 IS cellNumbering; 55 const PetscInt *cellNum; 56 PetscBool useCone, useClosure; 57 PetscSection section; 58 PetscSegBuffer adjBuffer; 59 PetscSF sfPoint; 60 PetscInt *adjCells = NULL, *remoteCells = NULL; 61 const PetscInt *local; 62 PetscInt nroots, nleaves, l; 63 PetscMPIInt rank; 64 PetscErrorCode ierr; 65 66 PetscFunctionBegin; 67 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr); 68 ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr); 69 ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr); 70 if (dim != depth) { 71 /* We do not handle the uninterpolated case here */ 72 ierr = DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);CHKERRQ(ierr); 73 /* DMPlexCreateNeighborCSR does not make a numbering */ 74 if (globalNumbering) {ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);CHKERRQ(ierr);} 75 /* Different behavior for empty graphs */ 76 if (!*numVertices) { 77 ierr = PetscMalloc1(1, offsets);CHKERRQ(ierr); 78 (*offsets)[0] = 0; 79 } 80 /* Broken in parallel */ 81 if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported"); 82 PetscFunctionReturn(0); 83 } 84 ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr); 85 ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr); 86 ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr); 87 /* Build adjacency graph via a section/segbuffer */ 88 ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), §ion);CHKERRQ(ierr); 89 ierr = PetscSectionSetChart(section, pStart, 
pEnd);CHKERRQ(ierr); 90 ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr); 91 /* Always use FVM adjacency to create partitioner graph */ 92 ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr); 93 ierr = DMSetBasicAdjacency(dm, PETSC_TRUE, PETSC_FALSE);CHKERRQ(ierr); 94 ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);CHKERRQ(ierr); 95 if (globalNumbering) { 96 ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr); 97 *globalNumbering = cellNumbering; 98 } 99 ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr); 100 /* For all boundary faces (including faces adjacent to a ghost cell), record the local cell in adjCells 101 Broadcast adjCells to remoteCells (to get cells from roots) and Reduce adjCells to remoteCells (to get cells from leaves) 102 */ 103 ierr = PetscSFGetGraph(dm->sf, &nroots, &nleaves, &local, NULL);CHKERRQ(ierr); 104 if (nroots >= 0) { 105 PetscInt fStart, fEnd, f; 106 107 ierr = PetscCalloc2(nroots, &adjCells, nroots, &remoteCells);CHKERRQ(ierr); 108 ierr = DMPlexGetHeightStratum(dm, 1, &fStart, &fEnd);CHKERRQ(ierr); 109 for (l = 0; l < nroots; ++l) adjCells[l] = -3; 110 for (f = fStart; f < fEnd; ++f) { 111 const PetscInt *support; 112 PetscInt supportSize; 113 114 ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr); 115 ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr); 116 if (supportSize == 1) adjCells[f] = cellNum[support[0]]; 117 else if (supportSize == 2) { 118 ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr); 119 if (p >= 0) adjCells[f] = cellNum[support[1]]; 120 ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr); 121 if (p >= 0) adjCells[f] = cellNum[support[0]]; 122 } 123 } 124 for (l = 0; l < nroots; ++l) remoteCells[l] = -1; 125 ierr = PetscSFBcastBegin(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr); 126 ierr = PetscSFBcastEnd(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr); 127 ierr = 
PetscSFReduceBegin(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr); 128 ierr = PetscSFReduceEnd(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr); 129 } 130 /* Combine local and global adjacencies */ 131 for (*numVertices = 0, p = pStart; p < pEnd; p++) { 132 const PetscInt *cone; 133 PetscInt coneSize, c; 134 135 /* Skip non-owned cells in parallel (ParMetis expects no overlap) */ 136 if (nroots > 0) {if (cellNum[p] < 0) continue;} 137 /* Add remote cells */ 138 if (remoteCells) { 139 ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr); 140 ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr); 141 for (c = 0; c < coneSize; ++c) { 142 if (remoteCells[cone[c]] != -1) { 143 PetscInt *PETSC_RESTRICT pBuf; 144 145 ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr); 146 ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr); 147 *pBuf = remoteCells[cone[c]]; 148 } 149 } 150 } 151 /* Add local cells */ 152 adjSize = PETSC_DETERMINE; 153 ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr); 154 for (a = 0; a < adjSize; ++a) { 155 const PetscInt point = adj[a]; 156 if (point != p && pStart <= point && point < pEnd) { 157 PetscInt *PETSC_RESTRICT pBuf; 158 ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr); 159 ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr); 160 *pBuf = cellNum[point]; 161 } 162 } 163 (*numVertices)++; 164 } 165 ierr = PetscFree2(adjCells, remoteCells);CHKERRQ(ierr); 166 ierr = DMSetBasicAdjacency(dm, useCone, useClosure);CHKERRQ(ierr); 167 /* Derive CSR graph from section/segbuffer */ 168 ierr = PetscSectionSetUp(section);CHKERRQ(ierr); 169 ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr); 170 ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr); 171 for (idx = 0, p = pStart; p < pEnd; p++) { 172 if (nroots > 0) {if (cellNum[p] < 0) continue;} 173 ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr); 174 } 175 vOffsets[*numVertices] = 
size; 176 if (offsets) *offsets = vOffsets; 177 ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr); 178 ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr); 179 ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr); 180 if (adjacency) *adjacency = graph; 181 /* Clean up */ 182 ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr); 183 ierr = PetscSectionDestroy(§ion);CHKERRQ(ierr); 184 ierr = PetscFree(adj);CHKERRQ(ierr); 185 PetscFunctionReturn(0); 186 } 187 188 /*@C 189 DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format. 190 191 Collective 192 193 Input Arguments: 194 + dm - The DMPlex 195 - cellHeight - The height of mesh points to treat as cells (default should be 0) 196 197 Output Arguments: 198 + numVertices - The number of local vertices in the graph, or cells in the mesh. 199 . offsets - The offset to the adjacency list for each cell 200 - adjacency - The adjacency list for all cells 201 202 Note: This is suitable for input to a mesh partitioner like ParMetis. 
203 204 Level: advanced 205 206 .seealso: DMPlexCreate() 207 @*/ 208 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency) 209 { 210 const PetscInt maxFaceCases = 30; 211 PetscInt numFaceCases = 0; 212 PetscInt numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */ 213 PetscInt *off, *adj; 214 PetscInt *neighborCells = NULL; 215 PetscInt dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell; 216 PetscErrorCode ierr; 217 218 PetscFunctionBegin; 219 /* For parallel partitioning, I think you have to communicate supports */ 220 ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr); 221 cellDim = dim - cellHeight; 222 ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr); 223 ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr); 224 if (cEnd - cStart == 0) { 225 if (numVertices) *numVertices = 0; 226 if (offsets) *offsets = NULL; 227 if (adjacency) *adjacency = NULL; 228 PetscFunctionReturn(0); 229 } 230 numCells = cEnd - cStart; 231 faceDepth = depth - cellHeight; 232 if (dim == depth) { 233 PetscInt f, fStart, fEnd; 234 235 ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr); 236 /* Count neighboring cells */ 237 ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr); 238 for (f = fStart; f < fEnd; ++f) { 239 const PetscInt *support; 240 PetscInt supportSize; 241 ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr); 242 ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr); 243 if (supportSize == 2) { 244 PetscInt numChildren; 245 246 ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr); 247 if (!numChildren) { 248 ++off[support[0]-cStart+1]; 249 ++off[support[1]-cStart+1]; 250 } 251 } 252 } 253 /* Prefix sum */ 254 for (c = 1; c <= numCells; ++c) off[c] += off[c-1]; 255 if (adjacency) { 256 PetscInt *tmp; 257 258 ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr); 259 ierr = PetscMalloc1(numCells+1, 
&tmp);CHKERRQ(ierr); 260 ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr); 261 /* Get neighboring cells */ 262 for (f = fStart; f < fEnd; ++f) { 263 const PetscInt *support; 264 PetscInt supportSize; 265 ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr); 266 ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr); 267 if (supportSize == 2) { 268 PetscInt numChildren; 269 270 ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr); 271 if (!numChildren) { 272 adj[tmp[support[0]-cStart]++] = support[1]; 273 adj[tmp[support[1]-cStart]++] = support[0]; 274 } 275 } 276 } 277 #if defined(PETSC_USE_DEBUG) 278 for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart); 279 #endif 280 ierr = PetscFree(tmp);CHKERRQ(ierr); 281 } 282 if (numVertices) *numVertices = numCells; 283 if (offsets) *offsets = off; 284 if (adjacency) *adjacency = adj; 285 PetscFunctionReturn(0); 286 } 287 /* Setup face recognition */ 288 if (faceDepth == 1) { 289 PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */ 290 291 for (c = cStart; c < cEnd; ++c) { 292 PetscInt corners; 293 294 ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr); 295 if (!cornersSeen[corners]) { 296 PetscInt nFV; 297 298 if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases"); 299 cornersSeen[corners] = 1; 300 301 ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr); 302 303 numFaceVertices[numFaceCases++] = nFV; 304 } 305 } 306 } 307 ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr); 308 /* Count neighboring cells */ 309 for (cell = cStart; cell < cEnd; ++cell) { 310 PetscInt numNeighbors = PETSC_DETERMINE, n; 311 312 ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, 
&numNeighbors, &neighborCells);CHKERRQ(ierr); 313 /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */ 314 for (n = 0; n < numNeighbors; ++n) { 315 PetscInt cellPair[2]; 316 PetscBool found = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE; 317 PetscInt meetSize = 0; 318 const PetscInt *meet = NULL; 319 320 cellPair[0] = cell; cellPair[1] = neighborCells[n]; 321 if (cellPair[0] == cellPair[1]) continue; 322 if (!found) { 323 ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr); 324 if (meetSize) { 325 PetscInt f; 326 327 for (f = 0; f < numFaceCases; ++f) { 328 if (numFaceVertices[f] == meetSize) { 329 found = PETSC_TRUE; 330 break; 331 } 332 } 333 } 334 ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr); 335 } 336 if (found) ++off[cell-cStart+1]; 337 } 338 } 339 /* Prefix sum */ 340 for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1]; 341 342 if (adjacency) { 343 ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr); 344 /* Get neighboring cells */ 345 for (cell = cStart; cell < cEnd; ++cell) { 346 PetscInt numNeighbors = PETSC_DETERMINE, n; 347 PetscInt cellOffset = 0; 348 349 ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr); 350 /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */ 351 for (n = 0; n < numNeighbors; ++n) { 352 PetscInt cellPair[2]; 353 PetscBool found = faceDepth > 1 ? 
PETSC_TRUE : PETSC_FALSE; 354 PetscInt meetSize = 0; 355 const PetscInt *meet = NULL; 356 357 cellPair[0] = cell; cellPair[1] = neighborCells[n]; 358 if (cellPair[0] == cellPair[1]) continue; 359 if (!found) { 360 ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr); 361 if (meetSize) { 362 PetscInt f; 363 364 for (f = 0; f < numFaceCases; ++f) { 365 if (numFaceVertices[f] == meetSize) { 366 found = PETSC_TRUE; 367 break; 368 } 369 } 370 } 371 ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr); 372 } 373 if (found) { 374 adj[off[cell-cStart]+cellOffset] = neighborCells[n]; 375 ++cellOffset; 376 } 377 } 378 } 379 } 380 ierr = PetscFree(neighborCells);CHKERRQ(ierr); 381 if (numVertices) *numVertices = numCells; 382 if (offsets) *offsets = off; 383 if (adjacency) *adjacency = adj; 384 PetscFunctionReturn(0); 385 } 386 387 /*@C 388 PetscPartitionerRegister - Adds a new PetscPartitioner implementation 389 390 Not Collective 391 392 Input Parameters: 393 + name - The name of a new user-defined creation routine 394 - create_func - The creation routine itself 395 396 Notes: 397 PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners 398 399 Sample usage: 400 .vb 401 PetscPartitionerRegister("my_part", MyPetscPartitionerCreate); 402 .ve 403 404 Then, your PetscPartitioner type can be chosen with the procedural interface via 405 .vb 406 PetscPartitionerCreate(MPI_Comm, PetscPartitioner *); 407 PetscPartitionerSetType(PetscPartitioner, "my_part"); 408 .ve 409 or at runtime via the option 410 .vb 411 -petscpartitioner_type my_part 412 .ve 413 414 Level: advanced 415 416 .keywords: PetscPartitioner, register 417 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy() 418 419 @*/ 420 PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner)) 421 { 422 PetscErrorCode ierr; 423 424 PetscFunctionBegin; 425 ierr = 
PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr); 426 PetscFunctionReturn(0); 427 } 428 429 /*@C 430 PetscPartitionerSetType - Builds a particular PetscPartitioner 431 432 Collective on PetscPartitioner 433 434 Input Parameters: 435 + part - The PetscPartitioner object 436 - name - The kind of partitioner 437 438 Options Database Key: 439 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types 440 441 Level: intermediate 442 443 .keywords: PetscPartitioner, set, type 444 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate() 445 @*/ 446 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name) 447 { 448 PetscErrorCode (*r)(PetscPartitioner); 449 PetscBool match; 450 PetscErrorCode ierr; 451 452 PetscFunctionBegin; 453 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 454 ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr); 455 if (match) PetscFunctionReturn(0); 456 457 ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr); 458 ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr); 459 if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name); 460 461 if (part->ops->destroy) { 462 ierr = (*part->ops->destroy)(part);CHKERRQ(ierr); 463 } 464 ierr = PetscMemzero(part->ops, sizeof(struct _PetscPartitionerOps));CHKERRQ(ierr); 465 ierr = (*r)(part);CHKERRQ(ierr); 466 ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr); 467 PetscFunctionReturn(0); 468 } 469 470 /*@C 471 PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object. 472 473 Not Collective 474 475 Input Parameter: 476 . part - The PetscPartitioner 477 478 Output Parameter: 479 . 
name - The PetscPartitioner type name 480 481 Level: intermediate 482 483 .keywords: PetscPartitioner, get, type, name 484 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate() 485 @*/ 486 PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name) 487 { 488 PetscErrorCode ierr; 489 490 PetscFunctionBegin; 491 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 492 PetscValidPointer(name, 2); 493 ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr); 494 *name = ((PetscObject) part)->type_name; 495 PetscFunctionReturn(0); 496 } 497 498 /*@C 499 PetscPartitionerView - Views a PetscPartitioner 500 501 Collective on PetscPartitioner 502 503 Input Parameter: 504 + part - the PetscPartitioner object to view 505 - v - the viewer 506 507 Level: developer 508 509 .seealso: PetscPartitionerDestroy() 510 @*/ 511 PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v) 512 { 513 PetscMPIInt size; 514 PetscBool isascii; 515 PetscErrorCode ierr; 516 517 PetscFunctionBegin; 518 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 519 if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);} 520 ierr = PetscObjectTypeCompare((PetscObject) v, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr); 521 if (isascii) { 522 ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr); 523 ierr = PetscViewerASCIIPrintf(v, "Graph Partitioner: %d MPI Process%s\n", size, size > 1 ? 
"es" : "");CHKERRQ(ierr); 524 ierr = PetscViewerASCIIPrintf(v, " type: %s\n", part->hdr.type_name);CHKERRQ(ierr); 525 ierr = PetscViewerASCIIPushTab(v);CHKERRQ(ierr); 526 ierr = PetscViewerASCIIPrintf(v, "edge cut: %D\n", part->edgeCut);CHKERRQ(ierr); 527 ierr = PetscViewerASCIIPrintf(v, "balance: %.2g\n", part->balance);CHKERRQ(ierr); 528 ierr = PetscViewerASCIIPopTab(v);CHKERRQ(ierr); 529 } 530 if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);} 531 PetscFunctionReturn(0); 532 } 533 534 static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType) 535 { 536 PetscFunctionBegin; 537 if (!currentType) { 538 #if defined(PETSC_HAVE_CHACO) 539 *defaultType = PETSCPARTITIONERCHACO; 540 #elif defined(PETSC_HAVE_PARMETIS) 541 *defaultType = PETSCPARTITIONERPARMETIS; 542 #elif defined(PETSC_HAVE_PTSCOTCH) 543 *defaultType = PETSCPARTITIONERPTSCOTCH; 544 #else 545 *defaultType = PETSCPARTITIONERSIMPLE; 546 #endif 547 } else { 548 *defaultType = currentType; 549 } 550 PetscFunctionReturn(0); 551 } 552 553 /*@ 554 PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database 555 556 Collective on PetscPartitioner 557 558 Input Parameter: 559 . 
part - the PetscPartitioner object to set options for 560 561 Level: developer 562 563 .seealso: PetscPartitionerView() 564 @*/ 565 PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part) 566 { 567 const char *defaultType; 568 char name[256]; 569 PetscBool flg; 570 PetscErrorCode ierr; 571 572 PetscFunctionBegin; 573 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 574 ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr); 575 ierr = PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);CHKERRQ(ierr); 576 ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr); 577 ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, sizeof(name), &flg);CHKERRQ(ierr); 578 if (flg) { 579 ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr); 580 } else if (!((PetscObject) part)->type_name) { 581 ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr); 582 } 583 if (part->ops->setfromoptions) { 584 ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr); 585 } 586 ierr = PetscViewerDestroy(&part->viewerGraph);CHKERRQ(ierr); 587 ierr = PetscOptionsGetViewer(((PetscObject) part)->comm, ((PetscObject) part)->options, ((PetscObject) part)->prefix, "-petscpartitioner_view_graph", &part->viewerGraph, &part->formatGraph, &part->viewGraph);CHKERRQ(ierr); 588 /* process any options handlers added with PetscObjectAddOptionsHandler() */ 589 ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr); 590 ierr = PetscOptionsEnd();CHKERRQ(ierr); 591 PetscFunctionReturn(0); 592 } 593 594 /*@C 595 PetscPartitionerSetUp - Construct data structures for the PetscPartitioner 596 597 Collective on PetscPartitioner 598 599 Input Parameter: 600 . 
part - the PetscPartitioner object to setup 601 602 Level: developer 603 604 .seealso: PetscPartitionerView(), PetscPartitionerDestroy() 605 @*/ 606 PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part) 607 { 608 PetscErrorCode ierr; 609 610 PetscFunctionBegin; 611 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 612 if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);} 613 PetscFunctionReturn(0); 614 } 615 616 /*@ 617 PetscPartitionerDestroy - Destroys a PetscPartitioner object 618 619 Collective on PetscPartitioner 620 621 Input Parameter: 622 . part - the PetscPartitioner object to destroy 623 624 Level: developer 625 626 .seealso: PetscPartitionerView() 627 @*/ 628 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part) 629 { 630 PetscErrorCode ierr; 631 632 PetscFunctionBegin; 633 if (!*part) PetscFunctionReturn(0); 634 PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1); 635 636 if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);} 637 ((PetscObject) (*part))->refct = 0; 638 639 ierr = PetscViewerDestroy(&(*part)->viewerGraph);CHKERRQ(ierr); 640 if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);} 641 ierr = PetscHeaderDestroy(part);CHKERRQ(ierr); 642 PetscFunctionReturn(0); 643 } 644 645 /*@ 646 PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType(). 647 648 Collective on MPI_Comm 649 650 Input Parameter: 651 . comm - The communicator for the PetscPartitioner object 652 653 Output Parameter: 654 . 
part - The PetscPartitioner object 655 656 Level: beginner 657 658 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER 659 @*/ 660 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part) 661 { 662 PetscPartitioner p; 663 const char *partitionerType = NULL; 664 PetscErrorCode ierr; 665 666 PetscFunctionBegin; 667 PetscValidPointer(part, 2); 668 *part = NULL; 669 ierr = DMInitializePackage();CHKERRQ(ierr); 670 671 ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr); 672 ierr = PetscPartitionerGetDefaultType(NULL,&partitionerType);CHKERRQ(ierr); 673 ierr = PetscPartitionerSetType(p,partitionerType);CHKERRQ(ierr); 674 675 p->edgeCut = 0; 676 p->balance = 0.0; 677 678 *part = p; 679 PetscFunctionReturn(0); 680 } 681 682 /*@ 683 PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh 684 685 Collective on DM 686 687 Input Parameters: 688 + part - The PetscPartitioner 689 - dm - The mesh DM 690 691 Output Parameters: 692 + partSection - The PetscSection giving the division of points by partition 693 - partition - The list of points by partition 694 695 Options Database: 696 . 
-petscpartitioner_view_graph - View the graph we are partitioning 697 698 Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight() 699 700 Level: developer 701 702 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate() 703 @*/ 704 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition) 705 { 706 PetscMPIInt size; 707 PetscErrorCode ierr; 708 709 PetscFunctionBegin; 710 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 711 PetscValidHeaderSpecific(dm, DM_CLASSID, 2); 712 PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4); 713 PetscValidPointer(partition, 5); 714 ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr); 715 if (size == 1) { 716 PetscInt *points; 717 PetscInt cStart, cEnd, c; 718 719 ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr); 720 ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr); 721 ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr); 722 ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr); 723 ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr); 724 for (c = cStart; c < cEnd; ++c) points[c] = c; 725 ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr); 726 PetscFunctionReturn(0); 727 } 728 if (part->height == 0) { 729 PetscInt numVertices; 730 PetscInt *start = NULL; 731 PetscInt *adjacency = NULL; 732 IS globalNumbering; 733 734 ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr); 735 if (part->viewGraph) { 736 PetscViewer viewer = part->viewerGraph; 737 PetscBool isascii; 738 PetscInt v, i; 739 PetscMPIInt rank; 740 741 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) viewer), &rank);CHKERRQ(ierr); 742 ierr = PetscObjectTypeCompare((PetscObject) viewer, 
PETSCVIEWERASCII, &isascii);CHKERRQ(ierr); 743 if (isascii) { 744 ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr); 745 ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]Nv: %D\n", rank, numVertices);CHKERRQ(ierr); 746 for (v = 0; v < numVertices; ++v) { 747 const PetscInt s = start[v]; 748 const PetscInt e = start[v+1]; 749 750 ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d] ", rank);CHKERRQ(ierr); 751 for (i = s; i < e; ++i) {ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%D ", adjacency[i]);CHKERRQ(ierr);} 752 ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%D-%D)\n", s, e);CHKERRQ(ierr); 753 } 754 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 755 ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr); 756 } 757 } 758 if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type"); 759 ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr); 760 ierr = PetscFree(start);CHKERRQ(ierr); 761 ierr = PetscFree(adjacency);CHKERRQ(ierr); 762 if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */ 763 const PetscInt *globalNum; 764 const PetscInt *partIdx; 765 PetscInt *map, cStart, cEnd; 766 PetscInt *adjusted, i, localSize, offset; 767 IS newPartition; 768 769 ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr); 770 ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr); 771 ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr); 772 ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr); 773 ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr); 774 ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr); 775 for (i = cStart, offset = 0; i < cEnd; i++) { 776 if (globalNum[i - cStart] >= 0) map[offset++] = i; 777 } 778 for (i = 0; i < localSize; i++) { 779 adjusted[i] = map[partIdx[i]]; 780 } 781 ierr = 
PetscFree(map);CHKERRQ(ierr); 782 ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr); 783 ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr); 784 ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr); 785 ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr); 786 ierr = ISDestroy(partition);CHKERRQ(ierr); 787 *partition = newPartition; 788 } 789 } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height); 790 ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr); 791 PetscFunctionReturn(0); 792 } 793 794 static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part) 795 { 796 PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data; 797 PetscErrorCode ierr; 798 799 PetscFunctionBegin; 800 ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr); 801 ierr = ISDestroy(&p->partition);CHKERRQ(ierr); 802 ierr = PetscFree(p);CHKERRQ(ierr); 803 PetscFunctionReturn(0); 804 } 805 806 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer) 807 { 808 PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data; 809 PetscErrorCode ierr; 810 811 PetscFunctionBegin; 812 if (p->random) { 813 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 814 ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr); 815 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 816 } 817 PetscFunctionReturn(0); 818 } 819 820 static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer) 821 { 822 PetscBool iascii; 823 PetscErrorCode ierr; 824 825 PetscFunctionBegin; 826 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 827 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2); 828 ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr); 829 if 
(iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);} 830 PetscFunctionReturn(0); 831 } 832 833 static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part) 834 { 835 PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data; 836 PetscErrorCode ierr; 837 838 PetscFunctionBegin; 839 ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner Shell Options");CHKERRQ(ierr); 840 ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr); 841 ierr = PetscOptionsTail();CHKERRQ(ierr); 842 PetscFunctionReturn(0); 843 } 844 845 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition) 846 { 847 PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data; 848 PetscInt np; 849 PetscErrorCode ierr; 850 851 PetscFunctionBegin; 852 if (p->random) { 853 PetscRandom r; 854 PetscInt *sizes, *points, v, p; 855 PetscMPIInt rank; 856 857 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr); 858 ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr); 859 ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr); 860 ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr); 861 ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr); 862 for (v = 0; v < numVertices; ++v) {points[v] = v;} 863 for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);} 864 for (v = numVertices-1; v > 0; --v) { 865 PetscReal val; 866 PetscInt w, tmp; 867 868 ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));CHKERRQ(ierr); 869 ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr); 870 w = PetscFloorReal(val); 871 tmp = points[v]; 872 points[v] = points[w]; 873 points[w] 
= tmp; 874 } 875 ierr = PetscRandomDestroy(&r);CHKERRQ(ierr); 876 ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr); 877 ierr = PetscFree2(sizes, points);CHKERRQ(ierr); 878 } 879 if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()"); 880 ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr); 881 if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np); 882 ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr); 883 if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np); 884 ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr); 885 *partition = p->partition; 886 ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr); 887 PetscFunctionReturn(0); 888 } 889 890 static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part) 891 { 892 PetscFunctionBegin; 893 part->ops->view = PetscPartitionerView_Shell; 894 part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell; 895 part->ops->destroy = PetscPartitionerDestroy_Shell; 896 part->ops->partition = PetscPartitionerPartition_Shell; 897 PetscFunctionReturn(0); 898 } 899 900 /*MC 901 PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object 902 903 Level: intermediate 904 905 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType() 906 M*/ 907 908 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part) 909 { 910 PetscPartitioner_Shell *p; 911 PetscErrorCode ierr; 912 913 PetscFunctionBegin; 914 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 915 ierr = PetscNewLog(part, &p);CHKERRQ(ierr); 916 part->data = p; 917 918 ierr = 
PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr); 919 p->random = PETSC_FALSE; 920 PetscFunctionReturn(0); 921 } 922 923 /*@C 924 PetscPartitionerShellSetPartition - Set an artifical partition for a mesh 925 926 Collective on Part 927 928 Input Parameters: 929 + part - The PetscPartitioner 930 . size - The number of partitions 931 . sizes - array of size size (or NULL) providing the number of points in each partition 932 - points - array of size sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.) 933 934 Level: developer 935 936 Notes: 937 938 It is safe to free the sizes and points arrays after use in this routine. 939 940 .seealso DMPlexDistribute(), PetscPartitionerCreate() 941 @*/ 942 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[]) 943 { 944 PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data; 945 PetscInt proc, numPoints; 946 PetscErrorCode ierr; 947 948 PetscFunctionBegin; 949 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 950 if (sizes) {PetscValidPointer(sizes, 3);} 951 if (points) {PetscValidPointer(points, 4);} 952 ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr); 953 ierr = ISDestroy(&p->partition);CHKERRQ(ierr); 954 ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr); 955 ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr); 956 if (sizes) { 957 for (proc = 0; proc < size; ++proc) { 958 ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr); 959 } 960 } 961 ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr); 962 ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr); 963 ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr); 964 PetscFunctionReturn(0); 965 } 

/*@
  PetscPartitionerShellSetRandom - Set the flag to use a random partition

  Collective on Part

  Input Parameters:
+ part   - The PetscPartitioner
- random - The flag to use a random partition

  Level: intermediate

.seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  p->random = random;
  PetscFunctionReturn(0);
}

/*@
  PetscPartitionerShellGetRandom - get the flag to use a random partition

  Collective on Part

  Input Parameter:
. part - The PetscPartitioner

  Output Parameter:
. random - The flag to use a random partition

  Level: intermediate

.seealso PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(random, 2);
  *random = p->random;
  PetscFunctionReturn(0);
}

/* Free the Simple partitioner context */
static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
{
  PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
  PetscErrorCode           ierr;

  PetscFunctionBegin;
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* The Simple partitioner has no state to print */
static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
  if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

/* Assign contiguous blocks of vertices to each partition without looking at the graph.
   In parallel, the global vertex range is split into nparts nearly-equal blocks and each
   rank records the dofs for the blocks that overlap its locally-owned vertex range. */
static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)part);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  /* The partition IS is always the identity: vertices keep their local order */
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial: first numVertices%nparts partitions get one extra vertex */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  } else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Prefix-sum the per-rank vertex counts to get each rank's global vertex range */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      PetscInt rem = nvGlobal % nparts;       /* number of "big" partitions holding one extra vertex */
      PetscInt pSmall = nvGlobal/nparts;      /* size of a small partition */
      PetscInt pBig = nvGlobal/nparts + 1;    /* size of a big partition */

      /* Locate the first and last global partitions that intersect [myFirst, myLast] */
      if (rem) {
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        } else {
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        } else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      } else {
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      /* Record how many of this rank's vertices fall in each overlapping partition */
      for (np = firstPart; np <= lastPart; np++) {
        PetscInt PartStart = np     * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Install the Simple implementation's function table */
static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view      = PetscPartitionerView_Simple;
  part->ops->destroy   = PetscPartitionerDestroy_Simple;
  part->ops->partition = PetscPartitionerPartition_Simple;
  PetscFunctionReturn(0);
}

/*MC
  PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object

  Level: intermediate

.seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
M*/

PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
{
  PetscPartitioner_Simple *p;
  PetscErrorCode           ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
  part->data = p;

  ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Free the Gather partitioner context */
static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
{
  PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
  PetscErrorCode           ierr;

  PetscFunctionBegin;
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* The Gather partitioner has no state to print */
static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
  if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

/* Assign every vertex to partition 0 (gather the whole mesh onto one process) */
static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  PetscInt       np;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
  for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Install the Gather implementation's function table */
static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view      = PetscPartitionerView_Gather;
  part->ops->destroy   = PetscPartitionerDestroy_Gather;
  part->ops->partition = PetscPartitionerPartition_Gather;
  PetscFunctionReturn(0);
}

/*MC
  PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object

  Level: intermediate

.seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
M*/

PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
{
  PetscPartitioner_Gather *p;
  PetscErrorCode           ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
  part->data = p;

  ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


/* Free the Chaco partitioner context */
static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
{
  PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* The Chaco partitioner has no state to print */
static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
  if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

#if defined(PETSC_HAVE_CHACO)
#if defined(PETSC_HAVE_UNISTD_H)
#include <unistd.h>
#endif
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
#include <chaco.h>
#else
/* Older versions of Chaco do not have an include file */
PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
                           float *ewgts, float *x, float *y, float *z, char *outassignname,
                           char *outfilename, short *assignment, int architecture, int ndims_tot,
                           int mesh_dims[3], double *goal, int global_method, int local_method,
                           int rqi_flag, int vmax, int ndims, double eigtol, long seed);
#endif
extern int FREE_GRAPH;
#endif

/* Partition the serial graph with Chaco. Chaco expects 1-based adjacency, so the
   adjacency array is incremented in place before the call and decremented after. */
static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /* name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal           = NULL;   /* desired set sizes for each set */
  int            global_method  = 1;      /* global partitioning algorithm */
  int            local_method   = 1;      /* local partitioning algorithm */
  int            rqi_flag       = 0;      /* should I use RQI/Symmlq eigensolver? */
  int            vmax           = 200;    /* how many vertices to coarsen down to? */
  int            ndims          = 1;      /* number of eigenvectors (2^d sets) */
  double         eigtol         = 0.001;  /* tolerance on eigenvectors */
  long           seed           = 123636512; /* for random graph mutations */
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int           *assignment;              /* Output partition */
#else
  short int     *assignment;              /* Output partition */
#endif
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
#if defined (PETSC_USE_DEBUG)
  {
    /* Chaco is a serial partitioner: error out if more than one rank holds vertices */
    int ival,isum;
    PetscBool distributed;

    ival = (numVertices > 0);
    ierr = MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);CHKERRQ(ierr);
    distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
    if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
  }
#endif
  if (!numVertices) {
    /* Empty local graph: produce an empty section and IS */
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0; /* Do not let Chaco free my memory */
  /* Chaco uses 1-based indexing; shift adjacency in place (undone below) */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  /* NOTE(review): the (int*) casts assume PetscInt is int-sized; presumably broken with
     64-bit indices -- confirm Chaco is disabled for --with-64-bit-indices builds */
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    /* Restore stdout and capture anything Chaco printed, for the error message */
    char msgLog[10000];
    int  count;

    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#else
  if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* Group vertices by partition: partition 0's vertices first, then partition 1's, ... */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift applied before the Chaco call */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}

/* Install the Chaco implementation's function table */
static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view      = PetscPartitionerView_Chaco;
  part->ops->destroy   = PetscPartitionerDestroy_Chaco;
  part->ops->partition = PetscPartitionerPartition_Chaco;
  PetscFunctionReturn(0);
}

/*MC
  PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library

  Level: intermediate

.seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
M*/

PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
{
  PetscPartitioner_Chaco *p;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
  part->data = p;

  ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
  ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* ParMetis partitioning modes, indexed by PetscPartitioner_ParMetis.ptype */
static const char *ptypes[] = {"kway", "rb"};

/* Free the ParMetis partitioner context */
static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
{
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
  PetscErrorCode             ierr;

  PetscFunctionBegin;
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Print the ParMetis partitioner configuration */
static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
  PetscErrorCode             ierr;

  PetscFunctionBegin;
  ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
  ierr =
PetscViewerASCIIPrintf(viewer, "ParMetis type: %s\n", ptypes[p->ptype]);CHKERRQ(ierr); 1440 ierr = PetscViewerASCIIPrintf(viewer, "load imbalance ratio %g\n", (double) p->imbalanceRatio);CHKERRQ(ierr); 1441 ierr = PetscViewerASCIIPrintf(viewer, "debug flag %D\n", p->debugFlag);CHKERRQ(ierr); 1442 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 1443 PetscFunctionReturn(0); 1444 } 1445 1446 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer) 1447 { 1448 PetscBool iascii; 1449 PetscErrorCode ierr; 1450 1451 PetscFunctionBegin; 1452 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 1453 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2); 1454 ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr); 1455 if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);} 1456 PetscFunctionReturn(0); 1457 } 1458 1459 static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part) 1460 { 1461 PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data; 1462 PetscInt p_randomSeed = -1; /* TODO: Add this to PetscPartitioner_ParMetis */ 1463 PetscErrorCode ierr; 1464 1465 PetscFunctionBegin; 1466 ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");CHKERRQ(ierr); 1467 ierr = PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);CHKERRQ(ierr); 1468 ierr = PetscOptionsReal("-petscpartitioner_parmetis_imbalance_ratio", "Load imbalance ratio limit", "", p->imbalanceRatio, &p->imbalanceRatio, NULL);CHKERRQ(ierr); 1469 ierr = PetscOptionsInt("-petscpartitioner_parmetis_debug", "Debugging flag", "", p->debugFlag, &p->debugFlag, NULL);CHKERRQ(ierr); 1470 ierr = PetscOptionsInt("-petscpartitioner_parmetis_seed", "Random seed", "", p_randomSeed, &p_randomSeed, NULL);CHKERRQ(ierr); 1471 ierr = 
PetscOptionsTail();CHKERRQ(ierr); 1472 PetscFunctionReturn(0); 1473 } 1474 1475 #if defined(PETSC_HAVE_PARMETIS) 1476 #include <parmetis.h> 1477 #endif 1478 1479 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition) 1480 { 1481 #if defined(PETSC_HAVE_PARMETIS) 1482 PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *) part->data; 1483 MPI_Comm comm; 1484 PetscSection section; 1485 PetscInt nvtxs = numVertices; /* The number of vertices in full graph */ 1486 PetscInt *vtxdist; /* Distribution of vertices across processes */ 1487 PetscInt *xadj = start; /* Start of edge list for each vertex */ 1488 PetscInt *adjncy = adjacency; /* Edge lists for all vertices */ 1489 PetscInt *vwgt = NULL; /* Vertex weights */ 1490 PetscInt *adjwgt = NULL; /* Edge weights */ 1491 PetscInt wgtflag = 0; /* Indicates which weights are present */ 1492 PetscInt numflag = 0; /* Indicates initial offset (0 or 1) */ 1493 PetscInt ncon = 1; /* The number of weights per vertex */ 1494 PetscInt metis_ptype = pm->ptype; /* kway or recursive bisection */ 1495 real_t *tpwgts; /* The fraction of vertex weights assigned to each partition */ 1496 real_t *ubvec; /* The balance intolerance for vertex weights */ 1497 PetscInt options[64]; /* Options */ 1498 /* Outputs */ 1499 PetscInt v, i, *assignment, *points; 1500 PetscMPIInt size, rank, p; 1501 PetscInt pm_randomSeed = -1; 1502 PetscErrorCode ierr; 1503 1504 PetscFunctionBegin; 1505 ierr = PetscOptionsGetInt(((PetscObject)part)->options,((PetscObject)part)->prefix,"-petscpartitioner_parmetis_seed", &pm_randomSeed, NULL);CHKERRQ(ierr); 1506 ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr); 1507 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 1508 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 1509 /* Calculate vertex distribution */ 1510 ierr = 
PetscMalloc5(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr); 1511 vtxdist[0] = 0; 1512 ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr); 1513 for (p = 2; p <= size; ++p) { 1514 vtxdist[p] += vtxdist[p-1]; 1515 } 1516 /* Calculate partition weights */ 1517 for (p = 0; p < nparts; ++p) { 1518 tpwgts[p] = 1.0/nparts; 1519 } 1520 ubvec[0] = pm->imbalanceRatio; 1521 /* Weight cells by dofs on cell by default */ 1522 ierr = DMGetSection(dm, §ion);CHKERRQ(ierr); 1523 if (section) { 1524 PetscInt cStart, cEnd, dof; 1525 1526 ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr); 1527 for (v = cStart; v < cEnd; ++v) { 1528 ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr); 1529 /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */ 1530 /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */ 1531 if (v-cStart < numVertices) vwgt[v-cStart] = PetscMax(dof, 1); 1532 } 1533 } else { 1534 for (v = 0; v < nvtxs; ++v) vwgt[v] = 1; 1535 } 1536 wgtflag |= 2; /* have weights on graph vertices */ 1537 1538 if (nparts == 1) { 1539 ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr); 1540 } else { 1541 for (p = 0; !vtxdist[p+1] && p < size; ++p); 1542 if (vtxdist[p+1] == vtxdist[size]) { 1543 if (rank == p) { 1544 ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */ 1545 options[METIS_OPTION_DBGLVL] = pm->debugFlag; 1546 options[METIS_OPTION_SEED] = pm_randomSeed; 1547 if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()"); 1548 if (metis_ptype == 1) { 1549 PetscStackPush("METIS_PartGraphRecursive"); 1550 ierr = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment); 1551 PetscStackPop; 1552 if (ierr != METIS_OK) 
SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()"); 1553 } else { 1554 /* 1555 It would be nice to activate the two options below, but they would need some actual testing. 1556 - Turning on these options may exercise path of the METIS code that have bugs and may break production runs. 1557 - If CONTIG is set to 1, METIS will exit with error if the graph is disconnected, despite the manual saying the option is ignored in such case. 1558 */ 1559 /* options[METIS_OPTION_CONTIG] = 1; */ /* try to produce partitions that are contiguous */ 1560 /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */ 1561 PetscStackPush("METIS_PartGraphKway"); 1562 ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment); 1563 PetscStackPop; 1564 if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()"); 1565 } 1566 } 1567 } else { 1568 options[0] = 1; /*use options */ 1569 options[1] = pm->debugFlag; 1570 options[2] = (pm_randomSeed == -1) ? 
15 : pm_randomSeed; /* default is GLOBAL_SEED=15 from `libparmetis/defs.h` */ 1571 PetscStackPush("ParMETIS_V3_PartKway"); 1572 ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &comm); 1573 PetscStackPop; 1574 if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr); 1575 } 1576 } 1577 /* Convert to PetscSection+IS */ 1578 ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr); 1579 for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);} 1580 ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr); 1581 ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr); 1582 for (p = 0, i = 0; p < nparts; ++p) { 1583 for (v = 0; v < nvtxs; ++v) { 1584 if (assignment[v] == p) points[i++] = v; 1585 } 1586 } 1587 if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs); 1588 ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr); 1589 ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr); 1590 PetscFunctionReturn(0); 1591 #else 1592 SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis."); 1593 #endif 1594 } 1595 1596 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part) 1597 { 1598 PetscFunctionBegin; 1599 part->ops->view = PetscPartitionerView_ParMetis; 1600 part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis; 1601 part->ops->destroy = PetscPartitionerDestroy_ParMetis; 1602 part->ops->partition = PetscPartitionerPartition_ParMetis; 1603 PetscFunctionReturn(0); 1604 } 1605 1606 /*MC 1607 PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library 1608 1609 Level: intermediate 1610 1611 .seealso: PetscPartitionerType, 
PetscPartitionerCreate(), PetscPartitionerSetType()
M*/

PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
{
  PetscPartitioner_ParMetis *p;
  PetscErrorCode             ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
  part->data = p;

  /* Defaults: k-way partitioning, 5% load imbalance, no METIS debugging output */
  p->ptype          = 0;
  p->imbalanceRatio = 1.05;
  p->debugFlag      = 0;

  ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
  ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


PetscBool  PTScotchPartitionercite = PETSC_FALSE;
const char PTScotchPartitionerCitation[] =
  "@article{PTSCOTCH,\n"
  "  author  = {C. Chevalier and F. Pellegrini},\n"
  "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
  "  journal = {Parallel Computing},\n"
  "  volume  = {34},\n"
  "  number  = {6},\n"
  "  pages   = {318--331},\n"
  "  year    = {2008},\n"
  "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
  "}\n";

/* Context for the PT-Scotch partitioner */
typedef struct {
  PetscInt  strategy;   /* index into PTScotchStrategyList */
  PetscReal imbalance;  /* allowed load imbalance */
} PetscPartitioner_PTScotch;

static const char *const
PTScotchStrategyList[] = {
  "DEFAULT",
  "QUALITY",
  "SPEED",
  "BALANCE",
  "SAFETY",
  "SCALABILITY",
  "RECURSIVE",
  "REMAP"
};

#if defined(PETSC_HAVE_PTSCOTCH)

EXTERN_C_BEGIN
#include <ptscotch.h>
EXTERN_C_END

/* SCOTCH returns nonzero on failure; map that onto a PETSc library error */
#define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0)

/* Map our strategy index onto the corresponding SCOTCH strategy flag */
static int PTScotch_Strategy(PetscInt strategy)
{
  switch (strategy) {
  case  0: return SCOTCH_STRATDEFAULT;
  case  1: return SCOTCH_STRATQUALITY;
  case  2: return SCOTCH_STRATSPEED;
  case  3: return SCOTCH_STRATBALANCE;
  case  4: return SCOTCH_STRATSAFETY;
  case  5: return SCOTCH_STRATSCALABILITY;
  case  6: return SCOTCH_STRATRECURSIVE;
  case  7: return SCOTCH_STRATREMAP;
  default: return SCOTCH_STRATDEFAULT;
  }
}


/* Partition a serial CSR graph with SCOTCH */
static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
                                             SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[])
{
  SCOTCH_Graph   grafdat;
  SCOTCH_Strat   stradat;
  SCOTCH_Num     vertnbr = n;
  SCOTCH_Num     edgenbr = xadj[n];
  SCOTCH_Num*    velotab = vtxwgt;
  SCOTCH_Num*    edlotab = adjwgt;
  SCOTCH_Num     flagval = strategy;
  double         kbalval = imbalance;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  {
    PetscBool flg = PETSC_TRUE;
    /* NOTE(review): option name is misspelled ("partititoner"); changing the string
       would break existing command lines, so it is left as-is -- confirm upstream */
    ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
    if (!flg) velotab = NULL;
  }
  ierr = SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
#if defined(PETSC_USE_DEBUG)
  ierr = SCOTCH_graphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
#endif
  ierr = SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
  SCOTCH_stratExit(&stradat);
  SCOTCH_graphExit(&grafdat);
  PetscFunctionReturn(0);
}

/* Partition a distributed CSR graph with PT-SCOTCH */
static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
                                             SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[], MPI_Comm comm)
{
  PetscMPIInt     procglbnbr;
  PetscMPIInt     proclocnum;
  SCOTCH_Arch     archdat;
  SCOTCH_Dgraph   grafdat;
SCOTCH_Dmapping mappdat; 1728 SCOTCH_Strat stradat; 1729 SCOTCH_Num vertlocnbr; 1730 SCOTCH_Num edgelocnbr; 1731 SCOTCH_Num* veloloctab = vtxwgt; 1732 SCOTCH_Num* edloloctab = adjwgt; 1733 SCOTCH_Num flagval = strategy; 1734 double kbalval = imbalance; 1735 PetscErrorCode ierr; 1736 1737 PetscFunctionBegin; 1738 { 1739 PetscBool flg = PETSC_TRUE; 1740 ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr); 1741 if (!flg) veloloctab = NULL; 1742 } 1743 ierr = MPI_Comm_size(comm, &procglbnbr);CHKERRQ(ierr); 1744 ierr = MPI_Comm_rank(comm, &proclocnum);CHKERRQ(ierr); 1745 vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum]; 1746 edgelocnbr = xadj[vertlocnbr]; 1747 1748 ierr = SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr); 1749 ierr = SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr); 1750 #if defined(PETSC_USE_DEBUG) 1751 ierr = SCOTCH_dgraphCheck(&grafdat);CHKERRPTSCOTCH(ierr); 1752 #endif 1753 ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr); 1754 ierr = SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRQ(ierr); 1755 ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr); 1756 ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr); 1757 ierr = SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr); 1758 ierr = SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr); 1759 SCOTCH_dgraphMapExit(&grafdat, &mappdat); 1760 SCOTCH_archExit(&archdat); 1761 SCOTCH_stratExit(&stradat); 1762 SCOTCH_dgraphExit(&grafdat); 1763 PetscFunctionReturn(0); 1764 } 1765 1766 #endif /* PETSC_HAVE_PTSCOTCH */ 1767 1768 static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part) 1769 { 1770 PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data; 1771 PetscErrorCode ierr; 1772 1773 
PetscFunctionBegin; 1774 ierr = PetscFree(p);CHKERRQ(ierr); 1775 PetscFunctionReturn(0); 1776 } 1777 1778 static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer) 1779 { 1780 PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data; 1781 PetscErrorCode ierr; 1782 1783 PetscFunctionBegin; 1784 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 1785 ierr = PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);CHKERRQ(ierr); 1786 ierr = PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n",(double)p->imbalance);CHKERRQ(ierr); 1787 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 1788 PetscFunctionReturn(0); 1789 } 1790 1791 static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer) 1792 { 1793 PetscBool iascii; 1794 PetscErrorCode ierr; 1795 1796 PetscFunctionBegin; 1797 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 1798 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2); 1799 ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr); 1800 if (iascii) {ierr = PetscPartitionerView_PTScotch_Ascii(part, viewer);CHKERRQ(ierr);} 1801 PetscFunctionReturn(0); 1802 } 1803 1804 static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part) 1805 { 1806 PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data; 1807 const char *const *slist = PTScotchStrategyList; 1808 PetscInt nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0])); 1809 PetscBool flag; 1810 PetscErrorCode ierr; 1811 1812 PetscFunctionBegin; 1813 ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");CHKERRQ(ierr); 1814 ierr = PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);CHKERRQ(ierr); 
1815 ierr = PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);CHKERRQ(ierr); 1816 ierr = PetscOptionsTail();CHKERRQ(ierr); 1817 PetscFunctionReturn(0); 1818 } 1819 1820 static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition) 1821 { 1822 #if defined(PETSC_HAVE_PTSCOTCH) 1823 MPI_Comm comm = PetscObjectComm((PetscObject)part); 1824 PetscInt nvtxs = numVertices; /* The number of vertices in full graph */ 1825 PetscInt *vtxdist; /* Distribution of vertices across processes */ 1826 PetscInt *xadj = start; /* Start of edge list for each vertex */ 1827 PetscInt *adjncy = adjacency; /* Edge lists for all vertices */ 1828 PetscInt *vwgt = NULL; /* Vertex weights */ 1829 PetscInt *adjwgt = NULL; /* Edge weights */ 1830 PetscInt v, i, *assignment, *points; 1831 PetscMPIInt size, rank, p; 1832 PetscErrorCode ierr; 1833 1834 PetscFunctionBegin; 1835 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 1836 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 1837 ierr = PetscMalloc2(size+1,&vtxdist,PetscMax(nvtxs,1),&assignment);CHKERRQ(ierr); 1838 1839 /* Calculate vertex distribution */ 1840 vtxdist[0] = 0; 1841 ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr); 1842 for (p = 2; p <= size; ++p) { 1843 vtxdist[p] += vtxdist[p-1]; 1844 } 1845 1846 if (nparts == 1) { 1847 ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr); 1848 } else { 1849 PetscSection section; 1850 /* Weight cells by dofs on cell by default */ 1851 ierr = PetscMalloc1(PetscMax(nvtxs,1),&vwgt);CHKERRQ(ierr); 1852 ierr = DMGetSection(dm, §ion);CHKERRQ(ierr); 1853 if (section) { 1854 PetscInt vStart, vEnd, dof; 1855 ierr = DMPlexGetHeightStratum(dm, 0, &vStart, &vEnd);CHKERRQ(ierr); 1856 for (v = vStart; v < vEnd; ++v) { 1857 ierr = 
PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr); 1858 /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */ 1859 /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */ 1860 if (v-vStart < numVertices) vwgt[v-vStart] = PetscMax(dof, 1); 1861 } 1862 } else { 1863 for (v = 0; v < nvtxs; ++v) vwgt[v] = 1; 1864 } 1865 { 1866 PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data; 1867 int strat = PTScotch_Strategy(pts->strategy); 1868 double imbal = (double)pts->imbalance; 1869 1870 for (p = 0; !vtxdist[p+1] && p < size; ++p); 1871 if (vtxdist[p+1] == vtxdist[size]) { 1872 if (rank == p) { 1873 ierr = PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, assignment);CHKERRQ(ierr); 1874 } 1875 } else { 1876 ierr = PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, assignment, comm);CHKERRQ(ierr); 1877 } 1878 } 1879 ierr = PetscFree(vwgt);CHKERRQ(ierr); 1880 } 1881 1882 /* Convert to PetscSection+IS */ 1883 ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr); 1884 for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);} 1885 ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr); 1886 ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr); 1887 for (p = 0, i = 0; p < nparts; ++p) { 1888 for (v = 0; v < nvtxs; ++v) { 1889 if (assignment[v] == p) points[i++] = v; 1890 } 1891 } 1892 if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs); 1893 ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr); 1894 1895 ierr = PetscFree2(vtxdist,assignment);CHKERRQ(ierr); 1896 PetscFunctionReturn(0); 1897 #else 1898 SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch."); 1899 #endif 1900 } 1901 
1902 static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part) 1903 { 1904 PetscFunctionBegin; 1905 part->ops->view = PetscPartitionerView_PTScotch; 1906 part->ops->destroy = PetscPartitionerDestroy_PTScotch; 1907 part->ops->partition = PetscPartitionerPartition_PTScotch; 1908 part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch; 1909 PetscFunctionReturn(0); 1910 } 1911 1912 /*MC 1913 PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library 1914 1915 Level: intermediate 1916 1917 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType() 1918 M*/ 1919 1920 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part) 1921 { 1922 PetscPartitioner_PTScotch *p; 1923 PetscErrorCode ierr; 1924 1925 PetscFunctionBegin; 1926 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 1927 ierr = PetscNewLog(part, &p);CHKERRQ(ierr); 1928 part->data = p; 1929 1930 p->strategy = 0; 1931 p->imbalance = 0.01; 1932 1933 ierr = PetscPartitionerInitialize_PTScotch(part);CHKERRQ(ierr); 1934 ierr = PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);CHKERRQ(ierr); 1935 PetscFunctionReturn(0); 1936 } 1937 1938 1939 /*@ 1940 DMPlexGetPartitioner - Get the mesh partitioner 1941 1942 Not collective 1943 1944 Input Parameter: 1945 . dm - The DM 1946 1947 Output Parameter: 1948 . part - The PetscPartitioner 1949 1950 Level: developer 1951 1952 Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner. 
1953 1954 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate() 1955 @*/ 1956 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part) 1957 { 1958 DM_Plex *mesh = (DM_Plex *) dm->data; 1959 1960 PetscFunctionBegin; 1961 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1962 PetscValidPointer(part, 2); 1963 *part = mesh->partitioner; 1964 PetscFunctionReturn(0); 1965 } 1966 1967 /*@ 1968 DMPlexSetPartitioner - Set the mesh partitioner 1969 1970 logically collective on dm and part 1971 1972 Input Parameters: 1973 + dm - The DM 1974 - part - The partitioner 1975 1976 Level: developer 1977 1978 Note: Any existing PetscPartitioner will be destroyed. 1979 1980 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate() 1981 @*/ 1982 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part) 1983 { 1984 DM_Plex *mesh = (DM_Plex *) dm->data; 1985 PetscErrorCode ierr; 1986 1987 PetscFunctionBegin; 1988 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1989 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2); 1990 ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr); 1991 ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr); 1992 mesh->partitioner = part; 1993 PetscFunctionReturn(0); 1994 } 1995 1996 static PetscErrorCode DMPlexAddClosure_Tree(DM dm, PetscHSetI ht, PetscInt point, PetscBool up, PetscBool down) 1997 { 1998 PetscErrorCode ierr; 1999 2000 PetscFunctionBegin; 2001 if (up) { 2002 PetscInt parent; 2003 2004 ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr); 2005 if (parent != point) { 2006 PetscInt closureSize, *closure = NULL, i; 2007 2008 ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr); 2009 for (i = 0; i < closureSize; i++) { 2010 PetscInt cpoint = closure[2*i]; 2011 2012 ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr); 2013 ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr); 2014 } 2015 ierr = 
DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr); 2016 } 2017 } 2018 if (down) { 2019 PetscInt numChildren; 2020 const PetscInt *children; 2021 2022 ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr); 2023 if (numChildren) { 2024 PetscInt i; 2025 2026 for (i = 0; i < numChildren; i++) { 2027 PetscInt cpoint = children[i]; 2028 2029 ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr); 2030 ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr); 2031 } 2032 } 2033 } 2034 PetscFunctionReturn(0); 2035 } 2036 2037 PETSC_INTERN PetscErrorCode DMPlexPartitionLabelClosure_Private(DM,DMLabel,PetscInt,PetscInt,const PetscInt[],IS*); 2038 2039 PetscErrorCode DMPlexPartitionLabelClosure_Private(DM dm, DMLabel label, PetscInt rank, PetscInt numPoints, const PetscInt points[], IS *closureIS) 2040 { 2041 DM_Plex *mesh = (DM_Plex *)dm->data; 2042 PetscBool hasTree = (mesh->parentSection || mesh->childSection) ? PETSC_TRUE : PETSC_FALSE; 2043 PetscInt *closure = NULL, closureSize, nelems, *elems, off = 0, p, c; 2044 PetscHSetI ht; 2045 PetscErrorCode ierr; 2046 2047 PetscFunctionBegin; 2048 ierr = PetscHSetICreate(&ht);CHKERRQ(ierr); 2049 ierr = PetscHSetIResize(ht, numPoints*16);CHKERRQ(ierr); 2050 for (p = 0; p < numPoints; ++p) { 2051 ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr); 2052 for (c = 0; c < closureSize*2; c += 2) { 2053 ierr = PetscHSetIAdd(ht, closure[c]);CHKERRQ(ierr); 2054 if (hasTree) {ierr = DMPlexAddClosure_Tree(dm, ht, closure[c], PETSC_TRUE, PETSC_TRUE);CHKERRQ(ierr);} 2055 } 2056 } 2057 if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure);CHKERRQ(ierr);} 2058 ierr = PetscHSetIGetSize(ht, &nelems);CHKERRQ(ierr); 2059 ierr = PetscMalloc1(nelems, &elems);CHKERRQ(ierr); 2060 ierr = PetscHSetIGetElems(ht, &off, elems);CHKERRQ(ierr); 2061 ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr); 2062 ierr = 
PetscSortInt(nelems, elems);CHKERRQ(ierr); 2063 ierr = ISCreateGeneral(PETSC_COMM_SELF, nelems, elems, PETSC_OWN_POINTER, closureIS);CHKERRQ(ierr); 2064 PetscFunctionReturn(0); 2065 } 2066 2067 /*@ 2068 DMPlexPartitionLabelClosure - Add the closure of all points to the partition label 2069 2070 Input Parameters: 2071 + dm - The DM 2072 - label - DMLabel assinging ranks to remote roots 2073 2074 Level: developer 2075 2076 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap 2077 @*/ 2078 PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label) 2079 { 2080 IS rankIS, pointIS, closureIS; 2081 const PetscInt *ranks, *points; 2082 PetscInt numRanks, numPoints, r; 2083 PetscErrorCode ierr; 2084 2085 PetscFunctionBegin; 2086 ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr); 2087 ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr); 2088 ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr); 2089 for (r = 0; r < numRanks; ++r) { 2090 const PetscInt rank = ranks[r]; 2091 ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr); 2092 ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr); 2093 ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr); 2094 ierr = DMPlexPartitionLabelClosure_Private(dm, label, rank, numPoints, points, &closureIS);CHKERRQ(ierr); 2095 ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr); 2096 ierr = ISDestroy(&pointIS);CHKERRQ(ierr); 2097 ierr = DMLabelSetStratumIS(label, rank, closureIS);CHKERRQ(ierr); 2098 ierr = ISDestroy(&closureIS);CHKERRQ(ierr); 2099 } 2100 ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr); 2101 ierr = ISDestroy(&rankIS);CHKERRQ(ierr); 2102 PetscFunctionReturn(0); 2103 } 2104 2105 /*@ 2106 DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label 2107 2108 Input Parameters: 2109 + dm - The DM 2110 - label - DMLabel assinging ranks to remote roots 2111 2112 Level: developer 2113 2114 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), 
DMPlexCreateOverlap 2115 @*/ 2116 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label) 2117 { 2118 IS rankIS, pointIS; 2119 const PetscInt *ranks, *points; 2120 PetscInt numRanks, numPoints, r, p, a, adjSize; 2121 PetscInt *adj = NULL; 2122 PetscErrorCode ierr; 2123 2124 PetscFunctionBegin; 2125 ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr); 2126 ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr); 2127 ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr); 2128 for (r = 0; r < numRanks; ++r) { 2129 const PetscInt rank = ranks[r]; 2130 2131 ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr); 2132 ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr); 2133 ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr); 2134 for (p = 0; p < numPoints; ++p) { 2135 adjSize = PETSC_DETERMINE; 2136 ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr); 2137 for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);} 2138 } 2139 ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr); 2140 ierr = ISDestroy(&pointIS);CHKERRQ(ierr); 2141 } 2142 ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr); 2143 ierr = ISDestroy(&rankIS);CHKERRQ(ierr); 2144 ierr = PetscFree(adj);CHKERRQ(ierr); 2145 PetscFunctionReturn(0); 2146 } 2147 2148 /*@ 2149 DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF 2150 2151 Input Parameters: 2152 + dm - The DM 2153 - label - DMLabel assinging ranks to remote roots 2154 2155 Level: developer 2156 2157 Note: This is required when generating multi-level overlaps to capture 2158 overlap points from non-neighbouring partitions. 
2159 2160 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap 2161 @*/ 2162 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label) 2163 { 2164 MPI_Comm comm; 2165 PetscMPIInt rank; 2166 PetscSF sfPoint; 2167 DMLabel lblRoots, lblLeaves; 2168 IS rankIS, pointIS; 2169 const PetscInt *ranks; 2170 PetscInt numRanks, r; 2171 PetscErrorCode ierr; 2172 2173 PetscFunctionBegin; 2174 ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr); 2175 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 2176 ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr); 2177 /* Pull point contributions from remote leaves into local roots */ 2178 ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr); 2179 ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr); 2180 ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr); 2181 ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr); 2182 for (r = 0; r < numRanks; ++r) { 2183 const PetscInt remoteRank = ranks[r]; 2184 if (remoteRank == rank) continue; 2185 ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr); 2186 ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr); 2187 ierr = ISDestroy(&pointIS);CHKERRQ(ierr); 2188 } 2189 ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr); 2190 ierr = ISDestroy(&rankIS);CHKERRQ(ierr); 2191 ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr); 2192 /* Push point contributions from roots into remote leaves */ 2193 ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr); 2194 ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr); 2195 ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr); 2196 ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr); 2197 for (r = 0; r < numRanks; ++r) { 2198 const PetscInt remoteRank = ranks[r]; 2199 if (remoteRank == rank) continue; 2200 ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr); 2201 ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr); 2202 ierr 
= ISDestroy(&pointIS);CHKERRQ(ierr); 2203 } 2204 ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr); 2205 ierr = ISDestroy(&rankIS);CHKERRQ(ierr); 2206 ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr); 2207 PetscFunctionReturn(0); 2208 } 2209 2210 /*@ 2211 DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label 2212 2213 Input Parameters: 2214 + dm - The DM 2215 . rootLabel - DMLabel assinging ranks to local roots 2216 . processSF - A star forest mapping into the local index on each remote rank 2217 2218 Output Parameter: 2219 - leafLabel - DMLabel assinging ranks to remote roots 2220 2221 Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The 2222 resulting leafLabel is a receiver mapping of remote roots to their parent rank. 2223 2224 Level: developer 2225 2226 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap 2227 @*/ 2228 PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel) 2229 { 2230 MPI_Comm comm; 2231 PetscMPIInt rank, size, r; 2232 PetscInt p, n, numNeighbors, numPoints, dof, off, rootSize, l, nleaves, leafSize; 2233 PetscSF sfPoint; 2234 PetscSection rootSection; 2235 PetscSFNode *rootPoints, *leafPoints; 2236 const PetscSFNode *remote; 2237 const PetscInt *local, *neighbors; 2238 IS valueIS; 2239 PetscBool mpiOverflow = PETSC_FALSE; 2240 PetscErrorCode ierr; 2241 2242 PetscFunctionBegin; 2243 ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr); 2244 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 2245 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 2246 ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr); 2247 2248 /* Convert to (point, rank) and use actual owners */ 2249 ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr); 2250 ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr); 2251 ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr); 2252 ierr = 
ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr); 2253 ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr); 2254 for (n = 0; n < numNeighbors; ++n) { 2255 ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr); 2256 ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr); 2257 } 2258 ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr); 2259 ierr = PetscSectionGetStorageSize(rootSection, &rootSize);CHKERRQ(ierr); 2260 ierr = PetscMalloc1(rootSize, &rootPoints);CHKERRQ(ierr); 2261 ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr); 2262 for (n = 0; n < numNeighbors; ++n) { 2263 IS pointIS; 2264 const PetscInt *points; 2265 2266 ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr); 2267 ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr); 2268 ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr); 2269 ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr); 2270 for (p = 0; p < numPoints; ++p) { 2271 if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);} 2272 else {l = -1;} 2273 if (l >= 0) {rootPoints[off+p] = remote[l];} 2274 else {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;} 2275 } 2276 ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr); 2277 ierr = ISDestroy(&pointIS);CHKERRQ(ierr); 2278 } 2279 2280 /* Try to communicate overlap using All-to-All */ 2281 if (!processSF) { 2282 PetscInt64 counter = 0; 2283 PetscBool locOverflow = PETSC_FALSE; 2284 PetscMPIInt *scounts, *sdispls, *rcounts, *rdispls; 2285 2286 ierr = PetscCalloc4(size, &scounts, size, &sdispls, size, &rcounts, size, &rdispls);CHKERRQ(ierr); 2287 for (n = 0; n < numNeighbors; ++n) { 2288 ierr = PetscSectionGetDof(rootSection, neighbors[n], &dof);CHKERRQ(ierr); 2289 ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr); 2290 #if defined(PETSC_USE_64BIT_INDICES) 2291 if (dof > PETSC_MPI_INT_MAX) 
{locOverflow = PETSC_TRUE; break;} 2292 if (off > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;} 2293 #endif 2294 scounts[neighbors[n]] = (PetscMPIInt) dof; 2295 sdispls[neighbors[n]] = (PetscMPIInt) off; 2296 } 2297 ierr = MPI_Alltoall(scounts, 1, MPI_INT, rcounts, 1, MPI_INT, comm);CHKERRQ(ierr); 2298 for (r = 0; r < size; ++r) { rdispls[r] = (int)counter; counter += rcounts[r]; } 2299 if (counter > PETSC_MPI_INT_MAX) locOverflow = PETSC_TRUE; 2300 ierr = MPI_Allreduce(&locOverflow, &mpiOverflow, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr); 2301 if (!mpiOverflow) { 2302 leafSize = (PetscInt) counter; 2303 ierr = PetscMalloc1(leafSize, &leafPoints);CHKERRQ(ierr); 2304 ierr = MPI_Alltoallv(rootPoints, scounts, sdispls, MPIU_2INT, leafPoints, rcounts, rdispls, MPIU_2INT, comm);CHKERRQ(ierr); 2305 } 2306 ierr = PetscFree4(scounts, sdispls, rcounts, rdispls);CHKERRQ(ierr); 2307 } 2308 2309 /* Communicate overlap using process star forest */ 2310 if (processSF || mpiOverflow) { 2311 PetscSF procSF; 2312 PetscSFNode *remote; 2313 PetscSection leafSection; 2314 2315 if (processSF) { 2316 ierr = PetscObjectReference((PetscObject)processSF);CHKERRQ(ierr); 2317 procSF = processSF; 2318 } else { 2319 ierr = PetscMalloc1(size, &remote);CHKERRQ(ierr); 2320 for (r = 0; r < size; ++r) { remote[r].rank = r; remote[r].index = rank; } 2321 ierr = PetscSFCreate(comm, &procSF);CHKERRQ(ierr); 2322 ierr = PetscSFSetGraph(procSF, size, size, NULL, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr); 2323 } 2324 2325 ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dm), &leafSection);CHKERRQ(ierr); 2326 ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr); 2327 ierr = PetscSectionGetStorageSize(leafSection, &leafSize);CHKERRQ(ierr); 2328 ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr); 2329 ierr = PetscSFDestroy(&procSF);CHKERRQ(ierr); 2330 } 2331 2332 for (p = 0; p < leafSize; p++) { 
2333 ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr); 2334 } 2335 2336 ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr); 2337 ierr = ISDestroy(&valueIS);CHKERRQ(ierr); 2338 ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr); 2339 ierr = PetscFree(rootPoints);CHKERRQ(ierr); 2340 ierr = PetscFree(leafPoints);CHKERRQ(ierr); 2341 PetscFunctionReturn(0); 2342 } 2343 2344 /*@ 2345 DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points 2346 2347 Input Parameters: 2348 + dm - The DM 2349 . label - DMLabel assinging ranks to remote roots 2350 2351 Output Parameter: 2352 - sf - The star forest communication context encapsulating the defined mapping 2353 2354 Note: The incoming label is a receiver mapping of remote points to their parent rank. 2355 2356 Level: developer 2357 2358 .seealso: DMPlexDistribute(), DMPlexCreateOverlap 2359 @*/ 2360 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf) 2361 { 2362 PetscMPIInt rank, size; 2363 PetscInt n, numRemote, p, numPoints, pStart, pEnd, idx = 0; 2364 PetscSFNode *remotePoints; 2365 IS remoteRootIS; 2366 const PetscInt *remoteRoots; 2367 PetscErrorCode ierr; 2368 2369 PetscFunctionBegin; 2370 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr); 2371 ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr); 2372 2373 for (numRemote = 0, n = 0; n < size; ++n) { 2374 ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr); 2375 numRemote += numPoints; 2376 } 2377 ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr); 2378 /* Put owned points first */ 2379 ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr); 2380 if (numPoints > 0) { 2381 ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr); 2382 ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr); 2383 for (p = 0; p < numPoints; p++) { 2384 remotePoints[idx].index 
= remoteRoots[p]; 2385 remotePoints[idx].rank = rank; 2386 idx++; 2387 } 2388 ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr); 2389 ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr); 2390 } 2391 /* Now add remote points */ 2392 for (n = 0; n < size; ++n) { 2393 ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr); 2394 if (numPoints <= 0 || n == rank) continue; 2395 ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr); 2396 ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr); 2397 for (p = 0; p < numPoints; p++) { 2398 remotePoints[idx].index = remoteRoots[p]; 2399 remotePoints[idx].rank = n; 2400 idx++; 2401 } 2402 ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr); 2403 ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr); 2404 } 2405 ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr); 2406 ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr); 2407 ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr); 2408 PetscFunctionReturn(0); 2409 } 2410