1 #include <petsc/private/dmpleximpl.h> /*I "petscdmplex.h" I*/ 2 #include <petsc/private/hashseti.h> 3 4 PetscClassId PETSCPARTITIONER_CLASSID = 0; 5 6 PetscFunctionList PetscPartitionerList = NULL; 7 PetscBool PetscPartitionerRegisterAllCalled = PETSC_FALSE; 8 9 PetscBool ChacoPartitionercite = PETSC_FALSE; 10 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n" 11 " author = {Bruce Hendrickson and Robert Leland},\n" 12 " title = {A multilevel algorithm for partitioning graphs},\n" 13 " booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)}," 14 " isbn = {0-89791-816-9},\n" 15 " pages = {28},\n" 16 " doi = {http://doi.acm.org/10.1145/224170.224228},\n" 17 " publisher = {ACM Press},\n" 18 " address = {New York},\n" 19 " year = {1995}\n}\n"; 20 21 PetscBool ParMetisPartitionercite = PETSC_FALSE; 22 const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n" 23 " author = {George Karypis and Vipin Kumar},\n" 24 " title = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n" 25 " journal = {Journal of Parallel and Distributed Computing},\n" 26 " volume = {48},\n" 27 " pages = {71--85},\n" 28 " year = {1998}\n}\n"; 29 30 /*@C 31 DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner 32 33 Input Parameters: 34 + dm - The mesh DM dm 35 - height - Height of the strata from which to construct the graph 36 37 Output Parameter: 38 + numVertices - Number of vertices in the graph 39 . offsets - Point offsets in the graph 40 . adjacency - Point connectivity in the graph 41 - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency". Negative indicates that the cell is a duplicate from another process. 42 43 The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function 44 representation on the mesh. 
45 46 Level: developer 47 48 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMSetAdjacency() 49 @*/ 50 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering) 51 { 52 PetscInt dim, depth, p, pStart, pEnd, a, adjSize, idx, size; 53 PetscInt *adj = NULL, *vOffsets = NULL, *graph = NULL; 54 IS cellNumbering; 55 const PetscInt *cellNum; 56 PetscBool useCone, useClosure; 57 PetscSection section; 58 PetscSegBuffer adjBuffer; 59 PetscSF sfPoint; 60 PetscInt *adjCells = NULL, *remoteCells = NULL; 61 const PetscInt *local; 62 PetscInt nroots, nleaves, l; 63 PetscMPIInt rank; 64 PetscErrorCode ierr; 65 66 PetscFunctionBegin; 67 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr); 68 ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr); 69 ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr); 70 if (dim != depth) { 71 /* We do not handle the uninterpolated case here */ 72 ierr = DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);CHKERRQ(ierr); 73 /* DMPlexCreateNeighborCSR does not make a numbering */ 74 if (globalNumbering) {ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);CHKERRQ(ierr);} 75 /* Different behavior for empty graphs */ 76 if (!*numVertices) { 77 ierr = PetscMalloc1(1, offsets);CHKERRQ(ierr); 78 (*offsets)[0] = 0; 79 } 80 /* Broken in parallel */ 81 if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported"); 82 PetscFunctionReturn(0); 83 } 84 ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr); 85 ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr); 86 ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr); 87 /* Build adjacency graph via a section/segbuffer */ 88 ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), §ion);CHKERRQ(ierr); 89 ierr = PetscSectionSetChart(section, pStart, 
pEnd);CHKERRQ(ierr); 90 ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr); 91 /* Always use FVM adjacency to create partitioner graph */ 92 ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr); 93 ierr = DMSetBasicAdjacency(dm, PETSC_TRUE, PETSC_FALSE);CHKERRQ(ierr); 94 ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);CHKERRQ(ierr); 95 if (globalNumbering) { 96 ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr); 97 *globalNumbering = cellNumbering; 98 } 99 ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr); 100 /* For all boundary faces (including faces adjacent to a ghost cell), record the local cell in adjCells 101 Broadcast adjCells to remoteCells (to get cells from roots) and Reduce adjCells to remoteCells (to get cells from leaves) 102 */ 103 ierr = PetscSFGetGraph(dm->sf, &nroots, &nleaves, &local, NULL);CHKERRQ(ierr); 104 if (nroots >= 0) { 105 PetscInt fStart, fEnd, f; 106 107 ierr = PetscCalloc2(nroots, &adjCells, nroots, &remoteCells);CHKERRQ(ierr); 108 ierr = DMPlexGetHeightStratum(dm, 1, &fStart, &fEnd);CHKERRQ(ierr); 109 for (l = 0; l < nroots; ++l) adjCells[l] = -3; 110 for (f = fStart; f < fEnd; ++f) { 111 const PetscInt *support; 112 PetscInt supportSize; 113 114 ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr); 115 ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr); 116 if (supportSize == 1) adjCells[f] = cellNum[support[0]]; 117 else if (supportSize == 2) { 118 ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr); 119 if (p >= 0) adjCells[f] = cellNum[support[1]]; 120 ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr); 121 if (p >= 0) adjCells[f] = cellNum[support[0]]; 122 } 123 } 124 for (l = 0; l < nroots; ++l) remoteCells[l] = -1; 125 ierr = PetscSFBcastBegin(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr); 126 ierr = PetscSFBcastEnd(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr); 127 ierr = 
PetscSFReduceBegin(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr); 128 ierr = PetscSFReduceEnd(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr); 129 } 130 /* Combine local and global adjacencies */ 131 for (*numVertices = 0, p = pStart; p < pEnd; p++) { 132 const PetscInt *cone; 133 PetscInt coneSize, c; 134 135 /* Skip non-owned cells in parallel (ParMetis expects no overlap) */ 136 if (nroots > 0) {if (cellNum[p] < 0) continue;} 137 /* Add remote cells */ 138 if (remoteCells) { 139 ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr); 140 ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr); 141 for (c = 0; c < coneSize; ++c) { 142 if (remoteCells[cone[c]] != -1) { 143 PetscInt *PETSC_RESTRICT pBuf; 144 145 ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr); 146 ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr); 147 *pBuf = remoteCells[cone[c]]; 148 } 149 } 150 } 151 /* Add local cells */ 152 adjSize = PETSC_DETERMINE; 153 ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr); 154 for (a = 0; a < adjSize; ++a) { 155 const PetscInt point = adj[a]; 156 if (point != p && pStart <= point && point < pEnd) { 157 PetscInt *PETSC_RESTRICT pBuf; 158 ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr); 159 ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr); 160 *pBuf = cellNum[point]; 161 } 162 } 163 (*numVertices)++; 164 } 165 ierr = PetscFree2(adjCells, remoteCells);CHKERRQ(ierr); 166 ierr = DMSetBasicAdjacency(dm, useCone, useClosure);CHKERRQ(ierr); 167 /* Derive CSR graph from section/segbuffer */ 168 ierr = PetscSectionSetUp(section);CHKERRQ(ierr); 169 ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr); 170 ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr); 171 for (idx = 0, p = pStart; p < pEnd; p++) { 172 if (nroots > 0) {if (cellNum[p] < 0) continue;} 173 ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr); 174 } 175 vOffsets[*numVertices] = 
size; 176 if (offsets) *offsets = vOffsets; 177 ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr); 178 ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr); 179 ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr); 180 if (adjacency) *adjacency = graph; 181 /* Clean up */ 182 ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr); 183 ierr = PetscSectionDestroy(§ion);CHKERRQ(ierr); 184 ierr = PetscFree(adj);CHKERRQ(ierr); 185 PetscFunctionReturn(0); 186 } 187 188 /*@C 189 DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format. 190 191 Collective 192 193 Input Arguments: 194 + dm - The DMPlex 195 - cellHeight - The height of mesh points to treat as cells (default should be 0) 196 197 Output Arguments: 198 + numVertices - The number of local vertices in the graph, or cells in the mesh. 199 . offsets - The offset to the adjacency list for each cell 200 - adjacency - The adjacency list for all cells 201 202 Note: This is suitable for input to a mesh partitioner like ParMetis. 
203 204 Level: advanced 205 206 .seealso: DMPlexCreate() 207 @*/ 208 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency) 209 { 210 const PetscInt maxFaceCases = 30; 211 PetscInt numFaceCases = 0; 212 PetscInt numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */ 213 PetscInt *off, *adj; 214 PetscInt *neighborCells = NULL; 215 PetscInt dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell; 216 PetscErrorCode ierr; 217 218 PetscFunctionBegin; 219 /* For parallel partitioning, I think you have to communicate supports */ 220 ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr); 221 cellDim = dim - cellHeight; 222 ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr); 223 ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr); 224 if (cEnd - cStart == 0) { 225 if (numVertices) *numVertices = 0; 226 if (offsets) *offsets = NULL; 227 if (adjacency) *adjacency = NULL; 228 PetscFunctionReturn(0); 229 } 230 numCells = cEnd - cStart; 231 faceDepth = depth - cellHeight; 232 if (dim == depth) { 233 PetscInt f, fStart, fEnd; 234 235 ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr); 236 /* Count neighboring cells */ 237 ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr); 238 for (f = fStart; f < fEnd; ++f) { 239 const PetscInt *support; 240 PetscInt supportSize; 241 ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr); 242 ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr); 243 if (supportSize == 2) { 244 PetscInt numChildren; 245 246 ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr); 247 if (!numChildren) { 248 ++off[support[0]-cStart+1]; 249 ++off[support[1]-cStart+1]; 250 } 251 } 252 } 253 /* Prefix sum */ 254 for (c = 1; c <= numCells; ++c) off[c] += off[c-1]; 255 if (adjacency) { 256 PetscInt *tmp; 257 258 ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr); 259 ierr = PetscMalloc1(numCells+1, 
&tmp);CHKERRQ(ierr); 260 ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr); 261 /* Get neighboring cells */ 262 for (f = fStart; f < fEnd; ++f) { 263 const PetscInt *support; 264 PetscInt supportSize; 265 ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr); 266 ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr); 267 if (supportSize == 2) { 268 PetscInt numChildren; 269 270 ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr); 271 if (!numChildren) { 272 adj[tmp[support[0]-cStart]++] = support[1]; 273 adj[tmp[support[1]-cStart]++] = support[0]; 274 } 275 } 276 } 277 #if defined(PETSC_USE_DEBUG) 278 for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart); 279 #endif 280 ierr = PetscFree(tmp);CHKERRQ(ierr); 281 } 282 if (numVertices) *numVertices = numCells; 283 if (offsets) *offsets = off; 284 if (adjacency) *adjacency = adj; 285 PetscFunctionReturn(0); 286 } 287 /* Setup face recognition */ 288 if (faceDepth == 1) { 289 PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */ 290 291 for (c = cStart; c < cEnd; ++c) { 292 PetscInt corners; 293 294 ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr); 295 if (!cornersSeen[corners]) { 296 PetscInt nFV; 297 298 if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases"); 299 cornersSeen[corners] = 1; 300 301 ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr); 302 303 numFaceVertices[numFaceCases++] = nFV; 304 } 305 } 306 } 307 ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr); 308 /* Count neighboring cells */ 309 for (cell = cStart; cell < cEnd; ++cell) { 310 PetscInt numNeighbors = PETSC_DETERMINE, n; 311 312 ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, 
&numNeighbors, &neighborCells);CHKERRQ(ierr); 313 /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */ 314 for (n = 0; n < numNeighbors; ++n) { 315 PetscInt cellPair[2]; 316 PetscBool found = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE; 317 PetscInt meetSize = 0; 318 const PetscInt *meet = NULL; 319 320 cellPair[0] = cell; cellPair[1] = neighborCells[n]; 321 if (cellPair[0] == cellPair[1]) continue; 322 if (!found) { 323 ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr); 324 if (meetSize) { 325 PetscInt f; 326 327 for (f = 0; f < numFaceCases; ++f) { 328 if (numFaceVertices[f] == meetSize) { 329 found = PETSC_TRUE; 330 break; 331 } 332 } 333 } 334 ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr); 335 } 336 if (found) ++off[cell-cStart+1]; 337 } 338 } 339 /* Prefix sum */ 340 for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1]; 341 342 if (adjacency) { 343 ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr); 344 /* Get neighboring cells */ 345 for (cell = cStart; cell < cEnd; ++cell) { 346 PetscInt numNeighbors = PETSC_DETERMINE, n; 347 PetscInt cellOffset = 0; 348 349 ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr); 350 /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */ 351 for (n = 0; n < numNeighbors; ++n) { 352 PetscInt cellPair[2]; 353 PetscBool found = faceDepth > 1 ? 
PETSC_TRUE : PETSC_FALSE; 354 PetscInt meetSize = 0; 355 const PetscInt *meet = NULL; 356 357 cellPair[0] = cell; cellPair[1] = neighborCells[n]; 358 if (cellPair[0] == cellPair[1]) continue; 359 if (!found) { 360 ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr); 361 if (meetSize) { 362 PetscInt f; 363 364 for (f = 0; f < numFaceCases; ++f) { 365 if (numFaceVertices[f] == meetSize) { 366 found = PETSC_TRUE; 367 break; 368 } 369 } 370 } 371 ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr); 372 } 373 if (found) { 374 adj[off[cell-cStart]+cellOffset] = neighborCells[n]; 375 ++cellOffset; 376 } 377 } 378 } 379 } 380 ierr = PetscFree(neighborCells);CHKERRQ(ierr); 381 if (numVertices) *numVertices = numCells; 382 if (offsets) *offsets = off; 383 if (adjacency) *adjacency = adj; 384 PetscFunctionReturn(0); 385 } 386 387 /*@C 388 PetscPartitionerRegister - Adds a new PetscPartitioner implementation 389 390 Not Collective 391 392 Input Parameters: 393 + name - The name of a new user-defined creation routine 394 - create_func - The creation routine itself 395 396 Notes: 397 PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners 398 399 Sample usage: 400 .vb 401 PetscPartitionerRegister("my_part", MyPetscPartitionerCreate); 402 .ve 403 404 Then, your PetscPartitioner type can be chosen with the procedural interface via 405 .vb 406 PetscPartitionerCreate(MPI_Comm, PetscPartitioner *); 407 PetscPartitionerSetType(PetscPartitioner, "my_part"); 408 .ve 409 or at runtime via the option 410 .vb 411 -petscpartitioner_type my_part 412 .ve 413 414 Level: advanced 415 416 .keywords: PetscPartitioner, register 417 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy() 418 419 @*/ 420 PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner)) 421 { 422 PetscErrorCode ierr; 423 424 PetscFunctionBegin; 425 ierr = 
PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr); 426 PetscFunctionReturn(0); 427 } 428 429 /*@C 430 PetscPartitionerSetType - Builds a particular PetscPartitioner 431 432 Collective on PetscPartitioner 433 434 Input Parameters: 435 + part - The PetscPartitioner object 436 - name - The kind of partitioner 437 438 Options Database Key: 439 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types 440 441 Level: intermediate 442 443 .keywords: PetscPartitioner, set, type 444 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate() 445 @*/ 446 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name) 447 { 448 PetscErrorCode (*r)(PetscPartitioner); 449 PetscBool match; 450 PetscErrorCode ierr; 451 452 PetscFunctionBegin; 453 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 454 ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr); 455 if (match) PetscFunctionReturn(0); 456 457 ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr); 458 ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr); 459 if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name); 460 461 if (part->ops->destroy) { 462 ierr = (*part->ops->destroy)(part);CHKERRQ(ierr); 463 } 464 ierr = PetscMemzero(part->ops, sizeof(struct _PetscPartitionerOps));CHKERRQ(ierr); 465 ierr = (*r)(part);CHKERRQ(ierr); 466 ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr); 467 PetscFunctionReturn(0); 468 } 469 470 /*@C 471 PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object. 472 473 Not Collective 474 475 Input Parameter: 476 . part - The PetscPartitioner 477 478 Output Parameter: 479 . 
name - The PetscPartitioner type name 480 481 Level: intermediate 482 483 .keywords: PetscPartitioner, get, type, name 484 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate() 485 @*/ 486 PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name) 487 { 488 PetscErrorCode ierr; 489 490 PetscFunctionBegin; 491 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 492 PetscValidPointer(name, 2); 493 ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr); 494 *name = ((PetscObject) part)->type_name; 495 PetscFunctionReturn(0); 496 } 497 498 /*@C 499 PetscPartitionerView - Views a PetscPartitioner 500 501 Collective on PetscPartitioner 502 503 Input Parameter: 504 + part - the PetscPartitioner object to view 505 - v - the viewer 506 507 Level: developer 508 509 .seealso: PetscPartitionerDestroy() 510 @*/ 511 PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v) 512 { 513 PetscMPIInt size; 514 PetscBool isascii; 515 PetscErrorCode ierr; 516 517 PetscFunctionBegin; 518 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 519 if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);} 520 ierr = PetscObjectTypeCompare((PetscObject) v, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr); 521 if (isascii) { 522 ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr); 523 ierr = PetscViewerASCIIPrintf(v, "Graph Partitioner: %d MPI Process%s\n", size, size > 1 ? 
"es" : "");CHKERRQ(ierr); 524 ierr = PetscViewerASCIIPrintf(v, " type: %s\n", part->hdr.type_name);CHKERRQ(ierr); 525 ierr = PetscViewerASCIIPushTab(v);CHKERRQ(ierr); 526 ierr = PetscViewerASCIIPrintf(v, "edge cut: %D\n", part->edgeCut);CHKERRQ(ierr); 527 ierr = PetscViewerASCIIPrintf(v, "balance: %.2g\n", part->balance);CHKERRQ(ierr); 528 ierr = PetscViewerASCIIPopTab(v);CHKERRQ(ierr); 529 } 530 if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);} 531 PetscFunctionReturn(0); 532 } 533 534 static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType) 535 { 536 PetscFunctionBegin; 537 if (!currentType) { 538 #if defined(PETSC_HAVE_CHACO) 539 *defaultType = PETSCPARTITIONERCHACO; 540 #elif defined(PETSC_HAVE_PARMETIS) 541 *defaultType = PETSCPARTITIONERPARMETIS; 542 #elif defined(PETSC_HAVE_PTSCOTCH) 543 *defaultType = PETSCPARTITIONERPTSCOTCH; 544 #else 545 *defaultType = PETSCPARTITIONERSIMPLE; 546 #endif 547 } else { 548 *defaultType = currentType; 549 } 550 PetscFunctionReturn(0); 551 } 552 553 /*@ 554 PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database 555 556 Collective on PetscPartitioner 557 558 Input Parameter: 559 . 
part - the PetscPartitioner object to set options for 560 561 Level: developer 562 563 .seealso: PetscPartitionerView() 564 @*/ 565 PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part) 566 { 567 const char *defaultType; 568 char name[256]; 569 PetscBool flg; 570 PetscErrorCode ierr; 571 572 PetscFunctionBegin; 573 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 574 ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr); 575 ierr = PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);CHKERRQ(ierr); 576 ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr); 577 ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, sizeof(name), &flg);CHKERRQ(ierr); 578 if (flg) { 579 ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr); 580 } else if (!((PetscObject) part)->type_name) { 581 ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr); 582 } 583 if (part->ops->setfromoptions) { 584 ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr); 585 } 586 ierr = PetscOptionsGetViewer(((PetscObject) part)->comm, ((PetscObject) part)->options, ((PetscObject) part)->prefix, "-petscpartitioner_view_graph", &part->viewerGraph, &part->formatGraph, &part->viewGraph);CHKERRQ(ierr); 587 /* process any options handlers added with PetscObjectAddOptionsHandler() */ 588 ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr); 589 ierr = PetscOptionsEnd();CHKERRQ(ierr); 590 PetscFunctionReturn(0); 591 } 592 593 /*@C 594 PetscPartitionerSetUp - Construct data structures for the PetscPartitioner 595 596 Collective on PetscPartitioner 597 598 Input Parameter: 599 . 
part - the PetscPartitioner object to setup 600 601 Level: developer 602 603 .seealso: PetscPartitionerView(), PetscPartitionerDestroy() 604 @*/ 605 PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part) 606 { 607 PetscErrorCode ierr; 608 609 PetscFunctionBegin; 610 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 611 if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);} 612 PetscFunctionReturn(0); 613 } 614 615 /*@ 616 PetscPartitionerDestroy - Destroys a PetscPartitioner object 617 618 Collective on PetscPartitioner 619 620 Input Parameter: 621 . part - the PetscPartitioner object to destroy 622 623 Level: developer 624 625 .seealso: PetscPartitionerView() 626 @*/ 627 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part) 628 { 629 PetscErrorCode ierr; 630 631 PetscFunctionBegin; 632 if (!*part) PetscFunctionReturn(0); 633 PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1); 634 635 if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);} 636 ((PetscObject) (*part))->refct = 0; 637 638 ierr = PetscViewerDestroy(&(*part)->viewerGraph);CHKERRQ(ierr); 639 if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);} 640 ierr = PetscHeaderDestroy(part);CHKERRQ(ierr); 641 PetscFunctionReturn(0); 642 } 643 644 /*@ 645 PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType(). 646 647 Collective on MPI_Comm 648 649 Input Parameter: 650 . comm - The communicator for the PetscPartitioner object 651 652 Output Parameter: 653 . 
part - The PetscPartitioner object 654 655 Level: beginner 656 657 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER 658 @*/ 659 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part) 660 { 661 PetscPartitioner p; 662 const char *partitionerType = NULL; 663 PetscErrorCode ierr; 664 665 PetscFunctionBegin; 666 PetscValidPointer(part, 2); 667 *part = NULL; 668 ierr = DMInitializePackage();CHKERRQ(ierr); 669 670 ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr); 671 ierr = PetscPartitionerGetDefaultType(NULL,&partitionerType);CHKERRQ(ierr); 672 ierr = PetscPartitionerSetType(p,partitionerType);CHKERRQ(ierr); 673 674 p->edgeCut = 0; 675 p->balance = 0.0; 676 677 *part = p; 678 PetscFunctionReturn(0); 679 } 680 681 /*@ 682 PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh 683 684 Collective on DM 685 686 Input Parameters: 687 + part - The PetscPartitioner 688 - dm - The mesh DM 689 690 Output Parameters: 691 + partSection - The PetscSection giving the division of points by partition 692 - partition - The list of points by partition 693 694 Options Database: 695 . 
-petscpartitioner_view_graph - View the graph we are partitioning 696 697 Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight() 698 699 Level: developer 700 701 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate() 702 @*/ 703 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition) 704 { 705 PetscMPIInt size; 706 PetscErrorCode ierr; 707 708 PetscFunctionBegin; 709 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 710 PetscValidHeaderSpecific(dm, DM_CLASSID, 2); 711 PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4); 712 PetscValidPointer(partition, 5); 713 ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr); 714 if (size == 1) { 715 PetscInt *points; 716 PetscInt cStart, cEnd, c; 717 718 ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr); 719 ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr); 720 ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr); 721 ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr); 722 ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr); 723 for (c = cStart; c < cEnd; ++c) points[c] = c; 724 ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr); 725 PetscFunctionReturn(0); 726 } 727 if (part->height == 0) { 728 PetscInt numVertices; 729 PetscInt *start = NULL; 730 PetscInt *adjacency = NULL; 731 IS globalNumbering; 732 733 ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr); 734 if (part->viewGraph) { 735 PetscViewer viewer = part->viewerGraph; 736 PetscBool isascii; 737 PetscInt v, i; 738 PetscMPIInt rank; 739 740 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) viewer), &rank);CHKERRQ(ierr); 741 ierr = PetscObjectTypeCompare((PetscObject) viewer, 
PETSCVIEWERASCII, &isascii);CHKERRQ(ierr); 742 if (isascii) { 743 ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr); 744 ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]Nv: %D\n", rank, numVertices);CHKERRQ(ierr); 745 for (v = 0; v < numVertices; ++v) { 746 const PetscInt s = start[v]; 747 const PetscInt e = start[v+1]; 748 749 ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d] ", rank);CHKERRQ(ierr); 750 for (i = s; i < e; ++i) {ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%D ", adjacency[i]);CHKERRQ(ierr);} 751 ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%D-%D)\n", s, e);CHKERRQ(ierr); 752 } 753 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 754 ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr); 755 } 756 } 757 if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type"); 758 ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr); 759 ierr = PetscFree(start);CHKERRQ(ierr); 760 ierr = PetscFree(adjacency);CHKERRQ(ierr); 761 if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */ 762 const PetscInt *globalNum; 763 const PetscInt *partIdx; 764 PetscInt *map, cStart, cEnd; 765 PetscInt *adjusted, i, localSize, offset; 766 IS newPartition; 767 768 ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr); 769 ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr); 770 ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr); 771 ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr); 772 ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr); 773 ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr); 774 for (i = cStart, offset = 0; i < cEnd; i++) { 775 if (globalNum[i - cStart] >= 0) map[offset++] = i; 776 } 777 for (i = 0; i < localSize; i++) { 778 adjusted[i] = map[partIdx[i]]; 779 } 780 ierr = 
PetscFree(map);CHKERRQ(ierr); 781 ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr); 782 ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr); 783 ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr); 784 ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr); 785 ierr = ISDestroy(partition);CHKERRQ(ierr); 786 *partition = newPartition; 787 } 788 } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height); 789 ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr); 790 PetscFunctionReturn(0); 791 } 792 793 static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part) 794 { 795 PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data; 796 PetscErrorCode ierr; 797 798 PetscFunctionBegin; 799 ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr); 800 ierr = ISDestroy(&p->partition);CHKERRQ(ierr); 801 ierr = PetscFree(p);CHKERRQ(ierr); 802 PetscFunctionReturn(0); 803 } 804 805 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer) 806 { 807 PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data; 808 PetscErrorCode ierr; 809 810 PetscFunctionBegin; 811 if (p->random) { 812 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 813 ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr); 814 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 815 } 816 PetscFunctionReturn(0); 817 } 818 819 static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer) 820 { 821 PetscBool iascii; 822 PetscErrorCode ierr; 823 824 PetscFunctionBegin; 825 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 826 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2); 827 ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr); 828 if 
(iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);} 829 PetscFunctionReturn(0); 830 } 831 832 static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part) 833 { 834 PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data; 835 PetscErrorCode ierr; 836 837 PetscFunctionBegin; 838 ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner Shell Options");CHKERRQ(ierr); 839 ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr); 840 ierr = PetscOptionsTail();CHKERRQ(ierr); 841 PetscFunctionReturn(0); 842 } 843 844 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition) 845 { 846 PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data; 847 PetscInt np; 848 PetscErrorCode ierr; 849 850 PetscFunctionBegin; 851 if (p->random) { 852 PetscRandom r; 853 PetscInt *sizes, *points, v, p; 854 PetscMPIInt rank; 855 856 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr); 857 ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr); 858 ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr); 859 ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr); 860 ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr); 861 for (v = 0; v < numVertices; ++v) {points[v] = v;} 862 for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);} 863 for (v = numVertices-1; v > 0; --v) { 864 PetscReal val; 865 PetscInt w, tmp; 866 867 ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));CHKERRQ(ierr); 868 ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr); 869 w = PetscFloorReal(val); 870 tmp = points[v]; 871 points[v] = points[w]; 872 points[w] 
= tmp; 873 } 874 ierr = PetscRandomDestroy(&r);CHKERRQ(ierr); 875 ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr); 876 ierr = PetscFree2(sizes, points);CHKERRQ(ierr); 877 } 878 if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()"); 879 ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr); 880 if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np); 881 ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr); 882 if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np); 883 ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr); 884 *partition = p->partition; 885 ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr); 886 PetscFunctionReturn(0); 887 } 888 889 static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part) 890 { 891 PetscFunctionBegin; 892 part->ops->view = PetscPartitionerView_Shell; 893 part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell; 894 part->ops->destroy = PetscPartitionerDestroy_Shell; 895 part->ops->partition = PetscPartitionerPartition_Shell; 896 PetscFunctionReturn(0); 897 } 898 899 /*MC 900 PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object 901 902 Level: intermediate 903 904 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType() 905 M*/ 906 907 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part) 908 { 909 PetscPartitioner_Shell *p; 910 PetscErrorCode ierr; 911 912 PetscFunctionBegin; 913 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 914 ierr = PetscNewLog(part, &p);CHKERRQ(ierr); 915 part->data = p; 916 917 ierr = 
PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr); 918 p->random = PETSC_FALSE; 919 PetscFunctionReturn(0); 920 } 921 922 /*@C 923 PetscPartitionerShellSetPartition - Set an artifical partition for a mesh 924 925 Collective on Part 926 927 Input Parameters: 928 + part - The PetscPartitioner 929 . size - The number of partitions 930 . sizes - array of size size (or NULL) providing the number of points in each partition 931 - points - array of size sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.) 932 933 Level: developer 934 935 Notes: 936 937 It is safe to free the sizes and points arrays after use in this routine. 938 939 .seealso DMPlexDistribute(), PetscPartitionerCreate() 940 @*/ 941 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[]) 942 { 943 PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data; 944 PetscInt proc, numPoints; 945 PetscErrorCode ierr; 946 947 PetscFunctionBegin; 948 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 949 if (sizes) {PetscValidPointer(sizes, 3);} 950 if (points) {PetscValidPointer(points, 4);} 951 ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr); 952 ierr = ISDestroy(&p->partition);CHKERRQ(ierr); 953 ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr); 954 ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr); 955 if (sizes) { 956 for (proc = 0; proc < size; ++proc) { 957 ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr); 958 } 959 } 960 ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr); 961 ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr); 962 ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr); 963 PetscFunctionReturn(0); 964 } 
/*@
  PetscPartitionerShellSetRandom - Set the flag to use a random partition

  Collective on Part

  Input Parameters:
+ part - The PetscPartitioner
- random - The flag to use a random partition

  Level: intermediate

.seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  p->random = random;
  PetscFunctionReturn(0);
}

/*@
  PetscPartitionerShellGetRandom - get the flag to use a random partition

  Collective on Part

  Input Parameter:
. part - The PetscPartitioner

  Output Parameter:
. random - The flag to use a random partition

  Level: intermediate

.seealso PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(random, 2);
  *random = p->random;
  PetscFunctionReturn(0);
}

/* Free the simple partitioner's context */
static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
{
  PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* The simple partitioner has no state worth printing */
static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
  if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

/* Assign contiguous blocks of the (implicit) global vertex numbering to the parts,
   giving each part floor(nv/nparts) vertices and the first (nv % nparts) parts one extra.
   The output IS is always the identity on local vertices; only the section dofs differ. */
static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)part);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial: balanced sizes computed directly */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  } else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Compute the global range [myFirst, myLast] of this rank's vertices via a prefix sum */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      PetscInt rem = nvGlobal % nparts;          /* number of "large" parts holding one extra vertex */
      PetscInt pSmall = nvGlobal/nparts;         /* size of a small part */
      PetscInt pBig = nvGlobal/nparts + 1;       /* size of a large part */

      /* Find the range [firstPart, lastPart] of parts that overlap this rank's vertex range.
         The first 'rem' parts have size pBig; the remaining parts have size pSmall. */
      if (rem) {
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        } else {
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        } else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      } else {
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      /* For each overlapping part, record how many of its vertices live on this rank */
      for (np = firstPart; np <= lastPart; np++) {
        PetscInt PartStart = np * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Install the simple implementation's function table */
static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view      = PetscPartitionerView_Simple;
  part->ops->destroy   = PetscPartitionerDestroy_Simple;
  part->ops->partition = PetscPartitionerPartition_Simple;
  PetscFunctionReturn(0);
}

/*MC
  PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object

  Level: intermediate

.seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
M*/

PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
{
  PetscPartitioner_Simple *p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
  part->data = p;

  ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode
PetscPartitionerDestroy_Gather(PetscPartitioner part)
{
  PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* The gather partitioner has no state worth printing */
static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
  if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

/* Assign every local vertex to partition 0; all other partitions are empty */
static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  PetscInt       np;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
  for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Install the gather implementation's function table */
static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view      = PetscPartitionerView_Gather;
  part->ops->destroy   = PetscPartitionerDestroy_Gather;
  part->ops->partition = PetscPartitionerPartition_Gather;
  PetscFunctionReturn(0);
}

/*MC
  PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object

  Level: intermediate

.seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
M*/

PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
{
  PetscPartitioner_Gather *p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
  part->data = p;

  ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


/* Free the Chaco partitioner's context */
static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
{
  PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* The Chaco partitioner has no state worth printing */
static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
  if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

#if defined(PETSC_HAVE_CHACO)
#if defined(PETSC_HAVE_UNISTD_H)
#include <unistd.h>
#endif
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
#include <chaco.h>
#else
/* Older versions of Chaco do not have an include file */
PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
                           float *ewgts, float *x, float *y, float *z, char *outassignname,
                           char *outfilename, short *assignment, int architecture, int ndims_tot,
                           int mesh_dims[3], double *goal, int global_method, int local_method,
                           int rqi_flag, int vmax, int ndims, double eigtol, long seed);
#endif
extern int FREE_GRAPH;
#endif

/* Partition the serial graph with the Chaco library. Chaco requires the whole graph on one
   rank; it uses 1-based adjacency, so the 0-based input is shifted in place and shifted back
   afterwards. Chaco writes diagnostics to stdout, which is temporarily redirected to a pipe
   so errors can be reported through SETERRQ. */
static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int           *assignment;              /* Output partition */
#else
  short int     *assignment;              /* Output partition */
#endif
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
#if defined (PETSC_USE_DEBUG)
  {
    /* In debug mode verify that at most one rank holds vertices (Chaco is serial) */
    int ival,isum;
    PetscBool distributed;

    ival = (numVertices > 0);
    ierr = MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);CHKERRQ(ierr);
    distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
    if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
  }
#endif
  if (!numVertices) {
    /* Ranks with no vertices still participate with an empty section/IS */
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0; /* Do not let Chaco free my memory */
  /* Chaco expects 1-based vertex numbers; shift the shared adjacency in place (undone below) */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    /* Restore stdout and capture anything Chaco printed, to use as the error message */
    char msgLog[10000];
    int  count;

    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#else
  if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* Group the vertex numbers by assigned partition, in partition order */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift applied above */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}

/* Install the Chaco implementation's function table */
static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view      = PetscPartitionerView_Chaco;
  part->ops->destroy   = PetscPartitionerDestroy_Chaco;
  part->ops->partition = PetscPartitionerPartition_Chaco;
  PetscFunctionReturn(0);
}

/*MC
  PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library

  Level: intermediate

.seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
M*/

PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
{
  PetscPartitioner_Chaco *p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
  part->data = p;

  ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
  ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* ParMetis partitioning method names, indexed by PetscPartitioner_ParMetis.ptype */
static const char *ptypes[] = {"kway", "rb"};

/* Free the ParMetis partitioner's context */
static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
{
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
  ierr =
PetscViewerASCIIPrintf(viewer, "ParMetis type: %s\n", ptypes[p->ptype]);CHKERRQ(ierr); 1439 ierr = PetscViewerASCIIPrintf(viewer, "load imbalance ratio %g\n", (double) p->imbalanceRatio);CHKERRQ(ierr); 1440 ierr = PetscViewerASCIIPrintf(viewer, "debug flag %D\n", p->debugFlag);CHKERRQ(ierr); 1441 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 1442 PetscFunctionReturn(0); 1443 } 1444 1445 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer) 1446 { 1447 PetscBool iascii; 1448 PetscErrorCode ierr; 1449 1450 PetscFunctionBegin; 1451 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 1452 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2); 1453 ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr); 1454 if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);} 1455 PetscFunctionReturn(0); 1456 } 1457 1458 static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part) 1459 { 1460 PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data; 1461 PetscErrorCode ierr; 1462 1463 PetscFunctionBegin; 1464 ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");CHKERRQ(ierr); 1465 ierr = PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);CHKERRQ(ierr); 1466 ierr = PetscOptionsReal("-petscpartitioner_parmetis_imbalance_ratio", "Load imbalance ratio limit", "", p->imbalanceRatio, &p->imbalanceRatio, NULL);CHKERRQ(ierr); 1467 ierr = PetscOptionsInt("-petscpartitioner_parmetis_debug", "Debugging flag", "", p->debugFlag, &p->debugFlag, NULL);CHKERRQ(ierr); 1468 ierr = PetscOptionsTail();CHKERRQ(ierr); 1469 PetscFunctionReturn(0); 1470 } 1471 1472 #if defined(PETSC_HAVE_PARMETIS) 1473 #include <parmetis.h> 1474 #endif 1475 1476 static PetscErrorCode 
PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition) 1477 { 1478 #if defined(PETSC_HAVE_PARMETIS) 1479 PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *) part->data; 1480 MPI_Comm comm; 1481 PetscSection section; 1482 PetscInt nvtxs = numVertices; /* The number of vertices in full graph */ 1483 PetscInt *vtxdist; /* Distribution of vertices across processes */ 1484 PetscInt *xadj = start; /* Start of edge list for each vertex */ 1485 PetscInt *adjncy = adjacency; /* Edge lists for all vertices */ 1486 PetscInt *vwgt = NULL; /* Vertex weights */ 1487 PetscInt *adjwgt = NULL; /* Edge weights */ 1488 PetscInt wgtflag = 0; /* Indicates which weights are present */ 1489 PetscInt numflag = 0; /* Indicates initial offset (0 or 1) */ 1490 PetscInt ncon = 1; /* The number of weights per vertex */ 1491 PetscInt metis_ptype = pm->ptype; /* kway or recursive bisection */ 1492 real_t *tpwgts; /* The fraction of vertex weights assigned to each partition */ 1493 real_t *ubvec; /* The balance intolerance for vertex weights */ 1494 PetscInt options[64]; /* Options */ 1495 /* Outputs */ 1496 PetscInt v, i, *assignment, *points; 1497 PetscMPIInt size, rank, p; 1498 PetscErrorCode ierr; 1499 1500 PetscFunctionBegin; 1501 ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr); 1502 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 1503 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 1504 /* Calculate vertex distribution */ 1505 ierr = PetscMalloc5(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr); 1506 vtxdist[0] = 0; 1507 ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr); 1508 for (p = 2; p <= size; ++p) { 1509 vtxdist[p] += vtxdist[p-1]; 1510 } 1511 /* Calculate partition weights */ 1512 for (p = 0; p < nparts; ++p) { 1513 tpwgts[p] = 1.0/nparts; 1514 
} 1515 ubvec[0] = pm->imbalanceRatio; 1516 /* Weight cells by dofs on cell by default */ 1517 ierr = DMGetSection(dm, §ion);CHKERRQ(ierr); 1518 if (section) { 1519 PetscInt cStart, cEnd, dof; 1520 1521 ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr); 1522 for (v = cStart; v < cEnd; ++v) { 1523 ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr); 1524 /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */ 1525 /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */ 1526 if (v-cStart < numVertices) vwgt[v-cStart] = PetscMax(dof, 1); 1527 } 1528 } else { 1529 for (v = 0; v < nvtxs; ++v) vwgt[v] = 1; 1530 } 1531 wgtflag |= 2; /* have weights on graph vertices */ 1532 1533 if (nparts == 1) { 1534 ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr); 1535 } else { 1536 for (p = 0; !vtxdist[p+1] && p < size; ++p); 1537 if (vtxdist[p+1] == vtxdist[size]) { 1538 if (rank == p) { 1539 ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */ 1540 if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()"); 1541 if (metis_ptype == 1) { 1542 PetscStackPush("METIS_PartGraphRecursive"); 1543 ierr = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment); 1544 PetscStackPop; 1545 if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()"); 1546 } else { 1547 /* 1548 It would be nice to activate the two options below, but they would need some actual testing. 1549 - Turning on these options may exercise path of the METIS code that have bugs and may break production runs. 1550 - If CONTIG is set to 1, METIS will exit with error if the graph is disconnected, despite the manual saying the option is ignored in such case. 
1551 */ 1552 /* options[METIS_OPTION_CONTIG] = 1; */ /* try to produce partitions that are contiguous */ 1553 /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */ 1554 PetscStackPush("METIS_PartGraphKway"); 1555 ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment); 1556 PetscStackPop; 1557 if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()"); 1558 } 1559 } 1560 } else { 1561 options[0] = 1; 1562 options[1] = pm->debugFlag; 1563 PetscStackPush("ParMETIS_V3_PartKway"); 1564 ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &comm); 1565 PetscStackPop; 1566 if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr); 1567 } 1568 } 1569 /* Convert to PetscSection+IS */ 1570 ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr); 1571 for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);} 1572 ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr); 1573 ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr); 1574 for (p = 0, i = 0; p < nparts; ++p) { 1575 for (v = 0; v < nvtxs; ++v) { 1576 if (assignment[v] == p) points[i++] = v; 1577 } 1578 } 1579 if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs); 1580 ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr); 1581 ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr); 1582 PetscFunctionReturn(0); 1583 #else 1584 SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis."); 1585 #endif 1586 } 1587 1588 static PetscErrorCode 
PetscPartitionerInitialize_ParMetis(PetscPartitioner part) 1589 { 1590 PetscFunctionBegin; 1591 part->ops->view = PetscPartitionerView_ParMetis; 1592 part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis; 1593 part->ops->destroy = PetscPartitionerDestroy_ParMetis; 1594 part->ops->partition = PetscPartitionerPartition_ParMetis; 1595 PetscFunctionReturn(0); 1596 } 1597 1598 /*MC 1599 PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library 1600 1601 Level: intermediate 1602 1603 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType() 1604 M*/ 1605 1606 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part) 1607 { 1608 PetscPartitioner_ParMetis *p; 1609 PetscErrorCode ierr; 1610 1611 PetscFunctionBegin; 1612 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 1613 ierr = PetscNewLog(part, &p);CHKERRQ(ierr); 1614 part->data = p; 1615 1616 p->ptype = 0; 1617 p->imbalanceRatio = 1.05; 1618 p->debugFlag = 0; 1619 1620 ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr); 1621 ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr); 1622 PetscFunctionReturn(0); 1623 } 1624 1625 1626 PetscBool PTScotchPartitionercite = PETSC_FALSE; 1627 const char PTScotchPartitionerCitation[] = 1628 "@article{PTSCOTCH,\n" 1629 " author = {C. Chevalier and F. 
Pellegrini},\n" 1630 " title = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n" 1631 " journal = {Parallel Computing},\n" 1632 " volume = {34},\n" 1633 " number = {6},\n" 1634 " pages = {318--331},\n" 1635 " year = {2008},\n" 1636 " doi = {https://doi.org/10.1016/j.parco.2007.12.001}\n" 1637 "}\n"; 1638 1639 typedef struct { 1640 PetscInt strategy; 1641 PetscReal imbalance; 1642 } PetscPartitioner_PTScotch; 1643 1644 static const char *const 1645 PTScotchStrategyList[] = { 1646 "DEFAULT", 1647 "QUALITY", 1648 "SPEED", 1649 "BALANCE", 1650 "SAFETY", 1651 "SCALABILITY", 1652 "RECURSIVE", 1653 "REMAP" 1654 }; 1655 1656 #if defined(PETSC_HAVE_PTSCOTCH) 1657 1658 EXTERN_C_BEGIN 1659 #include <ptscotch.h> 1660 EXTERN_C_END 1661 1662 #define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0) 1663 1664 static int PTScotch_Strategy(PetscInt strategy) 1665 { 1666 switch (strategy) { 1667 case 0: return SCOTCH_STRATDEFAULT; 1668 case 1: return SCOTCH_STRATQUALITY; 1669 case 2: return SCOTCH_STRATSPEED; 1670 case 3: return SCOTCH_STRATBALANCE; 1671 case 4: return SCOTCH_STRATSAFETY; 1672 case 5: return SCOTCH_STRATSCALABILITY; 1673 case 6: return SCOTCH_STRATRECURSIVE; 1674 case 7: return SCOTCH_STRATREMAP; 1675 default: return SCOTCH_STRATDEFAULT; 1676 } 1677 } 1678 1679 1680 static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[], 1681 SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[]) 1682 { 1683 SCOTCH_Graph grafdat; 1684 SCOTCH_Strat stradat; 1685 SCOTCH_Num vertnbr = n; 1686 SCOTCH_Num edgenbr = xadj[n]; 1687 SCOTCH_Num* velotab = vtxwgt; 1688 SCOTCH_Num* edlotab = adjwgt; 1689 SCOTCH_Num flagval = strategy; 1690 double kbalval = imbalance; 1691 PetscErrorCode ierr; 1692 1693 PetscFunctionBegin; 1694 { 1695 PetscBool flg = PETSC_TRUE; 1696 ierr = PetscOptionsGetBool(NULL, NULL, 
"-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr); 1697 if (!flg) velotab = NULL; 1698 } 1699 ierr = SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr); 1700 ierr = SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr); 1701 ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr); 1702 ierr = SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr); 1703 #if defined(PETSC_USE_DEBUG) 1704 ierr = SCOTCH_graphCheck(&grafdat);CHKERRPTSCOTCH(ierr); 1705 #endif 1706 ierr = SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr); 1707 SCOTCH_stratExit(&stradat); 1708 SCOTCH_graphExit(&grafdat); 1709 PetscFunctionReturn(0); 1710 } 1711 1712 static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[], 1713 SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[], MPI_Comm comm) 1714 { 1715 PetscMPIInt procglbnbr; 1716 PetscMPIInt proclocnum; 1717 SCOTCH_Arch archdat; 1718 SCOTCH_Dgraph grafdat; 1719 SCOTCH_Dmapping mappdat; 1720 SCOTCH_Strat stradat; 1721 SCOTCH_Num vertlocnbr; 1722 SCOTCH_Num edgelocnbr; 1723 SCOTCH_Num* veloloctab = vtxwgt; 1724 SCOTCH_Num* edloloctab = adjwgt; 1725 SCOTCH_Num flagval = strategy; 1726 double kbalval = imbalance; 1727 PetscErrorCode ierr; 1728 1729 PetscFunctionBegin; 1730 { 1731 PetscBool flg = PETSC_TRUE; 1732 ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr); 1733 if (!flg) veloloctab = NULL; 1734 } 1735 ierr = MPI_Comm_size(comm, &procglbnbr);CHKERRQ(ierr); 1736 ierr = MPI_Comm_rank(comm, &proclocnum);CHKERRQ(ierr); 1737 vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum]; 1738 edgelocnbr = xadj[vertlocnbr]; 1739 1740 ierr = SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr); 1741 ierr = SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, 
vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr); 1742 #if defined(PETSC_USE_DEBUG) 1743 ierr = SCOTCH_dgraphCheck(&grafdat);CHKERRPTSCOTCH(ierr); 1744 #endif 1745 ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr); 1746 ierr = SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRQ(ierr); 1747 ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr); 1748 ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr); 1749 ierr = SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr); 1750 ierr = SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr); 1751 SCOTCH_dgraphMapExit(&grafdat, &mappdat); 1752 SCOTCH_archExit(&archdat); 1753 SCOTCH_stratExit(&stradat); 1754 SCOTCH_dgraphExit(&grafdat); 1755 PetscFunctionReturn(0); 1756 } 1757 1758 #endif /* PETSC_HAVE_PTSCOTCH */ 1759 1760 static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part) 1761 { 1762 PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data; 1763 PetscErrorCode ierr; 1764 1765 PetscFunctionBegin; 1766 ierr = PetscFree(p);CHKERRQ(ierr); 1767 PetscFunctionReturn(0); 1768 } 1769 1770 static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer) 1771 { 1772 PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data; 1773 PetscErrorCode ierr; 1774 1775 PetscFunctionBegin; 1776 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 1777 ierr = PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);CHKERRQ(ierr); 1778 ierr = PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n",(double)p->imbalance);CHKERRQ(ierr); 1779 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 1780 PetscFunctionReturn(0); 1781 } 1782 1783 static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer) 1784 { 1785 
PetscBool iascii; 1786 PetscErrorCode ierr; 1787 1788 PetscFunctionBegin; 1789 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 1790 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2); 1791 ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr); 1792 if (iascii) {ierr = PetscPartitionerView_PTScotch_Ascii(part, viewer);CHKERRQ(ierr);} 1793 PetscFunctionReturn(0); 1794 } 1795 1796 static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part) 1797 { 1798 PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data; 1799 const char *const *slist = PTScotchStrategyList; 1800 PetscInt nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0])); 1801 PetscBool flag; 1802 PetscErrorCode ierr; 1803 1804 PetscFunctionBegin; 1805 ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");CHKERRQ(ierr); 1806 ierr = PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);CHKERRQ(ierr); 1807 ierr = PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);CHKERRQ(ierr); 1808 ierr = PetscOptionsTail();CHKERRQ(ierr); 1809 PetscFunctionReturn(0); 1810 } 1811 1812 static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition) 1813 { 1814 #if defined(PETSC_HAVE_PTSCOTCH) 1815 MPI_Comm comm = PetscObjectComm((PetscObject)part); 1816 PetscInt nvtxs = numVertices; /* The number of vertices in full graph */ 1817 PetscInt *vtxdist; /* Distribution of vertices across processes */ 1818 PetscInt *xadj = start; /* Start of edge list for each vertex */ 1819 PetscInt *adjncy = adjacency; /* Edge lists for all vertices */ 1820 PetscInt *vwgt = NULL; 
/* Vertex weights */ 1821 PetscInt *adjwgt = NULL; /* Edge weights */ 1822 PetscInt v, i, *assignment, *points; 1823 PetscMPIInt size, rank, p; 1824 PetscErrorCode ierr; 1825 1826 PetscFunctionBegin; 1827 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 1828 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 1829 ierr = PetscMalloc2(size+1,&vtxdist,PetscMax(nvtxs,1),&assignment);CHKERRQ(ierr); 1830 1831 /* Calculate vertex distribution */ 1832 vtxdist[0] = 0; 1833 ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr); 1834 for (p = 2; p <= size; ++p) { 1835 vtxdist[p] += vtxdist[p-1]; 1836 } 1837 1838 if (nparts == 1) { 1839 ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr); 1840 } else { 1841 PetscSection section; 1842 /* Weight cells by dofs on cell by default */ 1843 ierr = PetscMalloc1(PetscMax(nvtxs,1),&vwgt);CHKERRQ(ierr); 1844 ierr = DMGetSection(dm, §ion);CHKERRQ(ierr); 1845 if (section) { 1846 PetscInt vStart, vEnd, dof; 1847 ierr = DMPlexGetHeightStratum(dm, 0, &vStart, &vEnd);CHKERRQ(ierr); 1848 for (v = vStart; v < vEnd; ++v) { 1849 ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr); 1850 /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */ 1851 /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. 
*/ 1852 if (v-vStart < numVertices) vwgt[v-vStart] = PetscMax(dof, 1); 1853 } 1854 } else { 1855 for (v = 0; v < nvtxs; ++v) vwgt[v] = 1; 1856 } 1857 { 1858 PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data; 1859 int strat = PTScotch_Strategy(pts->strategy); 1860 double imbal = (double)pts->imbalance; 1861 1862 for (p = 0; !vtxdist[p+1] && p < size; ++p); 1863 if (vtxdist[p+1] == vtxdist[size]) { 1864 if (rank == p) { 1865 ierr = PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, assignment);CHKERRQ(ierr); 1866 } 1867 } else { 1868 ierr = PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, assignment, comm);CHKERRQ(ierr); 1869 } 1870 } 1871 ierr = PetscFree(vwgt);CHKERRQ(ierr); 1872 } 1873 1874 /* Convert to PetscSection+IS */ 1875 ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr); 1876 for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);} 1877 ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr); 1878 ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr); 1879 for (p = 0, i = 0; p < nparts; ++p) { 1880 for (v = 0; v < nvtxs; ++v) { 1881 if (assignment[v] == p) points[i++] = v; 1882 } 1883 } 1884 if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs); 1885 ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr); 1886 1887 ierr = PetscFree2(vtxdist,assignment);CHKERRQ(ierr); 1888 PetscFunctionReturn(0); 1889 #else 1890 SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch."); 1891 #endif 1892 } 1893 1894 static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part) 1895 { 1896 PetscFunctionBegin; 1897 part->ops->view = PetscPartitionerView_PTScotch; 1898 part->ops->destroy = PetscPartitionerDestroy_PTScotch; 1899 part->ops->partition = 
PetscPartitionerPartition_PTScotch; 1900 part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch; 1901 PetscFunctionReturn(0); 1902 } 1903 1904 /*MC 1905 PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library 1906 1907 Level: intermediate 1908 1909 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType() 1910 M*/ 1911 1912 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part) 1913 { 1914 PetscPartitioner_PTScotch *p; 1915 PetscErrorCode ierr; 1916 1917 PetscFunctionBegin; 1918 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 1919 ierr = PetscNewLog(part, &p);CHKERRQ(ierr); 1920 part->data = p; 1921 1922 p->strategy = 0; 1923 p->imbalance = 0.01; 1924 1925 ierr = PetscPartitionerInitialize_PTScotch(part);CHKERRQ(ierr); 1926 ierr = PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);CHKERRQ(ierr); 1927 PetscFunctionReturn(0); 1928 } 1929 1930 1931 /*@ 1932 DMPlexGetPartitioner - Get the mesh partitioner 1933 1934 Not collective 1935 1936 Input Parameter: 1937 . dm - The DM 1938 1939 Output Parameter: 1940 . part - The PetscPartitioner 1941 1942 Level: developer 1943 1944 Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner. 
1945 1946 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate() 1947 @*/ 1948 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part) 1949 { 1950 DM_Plex *mesh = (DM_Plex *) dm->data; 1951 1952 PetscFunctionBegin; 1953 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1954 PetscValidPointer(part, 2); 1955 *part = mesh->partitioner; 1956 PetscFunctionReturn(0); 1957 } 1958 1959 /*@ 1960 DMPlexSetPartitioner - Set the mesh partitioner 1961 1962 logically collective on dm and part 1963 1964 Input Parameters: 1965 + dm - The DM 1966 - part - The partitioner 1967 1968 Level: developer 1969 1970 Note: Any existing PetscPartitioner will be destroyed. 1971 1972 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate() 1973 @*/ 1974 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part) 1975 { 1976 DM_Plex *mesh = (DM_Plex *) dm->data; 1977 PetscErrorCode ierr; 1978 1979 PetscFunctionBegin; 1980 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1981 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2); 1982 ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr); 1983 ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr); 1984 mesh->partitioner = part; 1985 PetscFunctionReturn(0); 1986 } 1987 1988 static PetscErrorCode DMPlexAddClosure_Tree(DM dm, PetscHSetI ht, PetscInt point, PetscBool up, PetscBool down) 1989 { 1990 PetscErrorCode ierr; 1991 1992 PetscFunctionBegin; 1993 if (up) { 1994 PetscInt parent; 1995 1996 ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr); 1997 if (parent != point) { 1998 PetscInt closureSize, *closure = NULL, i; 1999 2000 ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr); 2001 for (i = 0; i < closureSize; i++) { 2002 PetscInt cpoint = closure[2*i]; 2003 2004 ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr); 2005 ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr); 2006 } 2007 ierr = 
DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr); 2008 } 2009 } 2010 if (down) { 2011 PetscInt numChildren; 2012 const PetscInt *children; 2013 2014 ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr); 2015 if (numChildren) { 2016 PetscInt i; 2017 2018 for (i = 0; i < numChildren; i++) { 2019 PetscInt cpoint = children[i]; 2020 2021 ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr); 2022 ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr); 2023 } 2024 } 2025 } 2026 PetscFunctionReturn(0); 2027 } 2028 2029 PETSC_INTERN PetscErrorCode DMPlexPartitionLabelClosure_Private(DM,DMLabel,PetscInt,PetscInt,const PetscInt[],IS*); 2030 2031 PetscErrorCode DMPlexPartitionLabelClosure_Private(DM dm, DMLabel label, PetscInt rank, PetscInt numPoints, const PetscInt points[], IS *closureIS) 2032 { 2033 DM_Plex *mesh = (DM_Plex *)dm->data; 2034 PetscBool hasTree = (mesh->parentSection || mesh->childSection) ? PETSC_TRUE : PETSC_FALSE; 2035 PetscInt *closure = NULL, closureSize, nelems, *elems, off = 0, p, c; 2036 PetscHSetI ht; 2037 PetscErrorCode ierr; 2038 2039 PetscFunctionBegin; 2040 ierr = PetscHSetICreate(&ht);CHKERRQ(ierr); 2041 ierr = PetscHSetIResize(ht, numPoints*16);CHKERRQ(ierr); 2042 for (p = 0; p < numPoints; ++p) { 2043 ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr); 2044 for (c = 0; c < closureSize*2; c += 2) { 2045 ierr = PetscHSetIAdd(ht, closure[c]);CHKERRQ(ierr); 2046 if (hasTree) {ierr = DMPlexAddClosure_Tree(dm, ht, closure[c], PETSC_TRUE, PETSC_TRUE);CHKERRQ(ierr);} 2047 } 2048 } 2049 if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure);CHKERRQ(ierr);} 2050 ierr = PetscHSetIGetSize(ht, &nelems);CHKERRQ(ierr); 2051 ierr = PetscMalloc1(nelems, &elems);CHKERRQ(ierr); 2052 ierr = PetscHSetIGetElems(ht, &off, elems);CHKERRQ(ierr); 2053 ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr); 2054 ierr = 
PetscSortInt(nelems, elems);CHKERRQ(ierr); 2055 ierr = ISCreateGeneral(PETSC_COMM_SELF, nelems, elems, PETSC_OWN_POINTER, closureIS);CHKERRQ(ierr); 2056 PetscFunctionReturn(0); 2057 } 2058 2059 /*@ 2060 DMPlexPartitionLabelClosure - Add the closure of all points to the partition label 2061 2062 Input Parameters: 2063 + dm - The DM 2064 - label - DMLabel assinging ranks to remote roots 2065 2066 Level: developer 2067 2068 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap 2069 @*/ 2070 PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label) 2071 { 2072 IS rankIS, pointIS, closureIS; 2073 const PetscInt *ranks, *points; 2074 PetscInt numRanks, numPoints, r; 2075 PetscErrorCode ierr; 2076 2077 PetscFunctionBegin; 2078 ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr); 2079 ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr); 2080 ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr); 2081 for (r = 0; r < numRanks; ++r) { 2082 const PetscInt rank = ranks[r]; 2083 ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr); 2084 ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr); 2085 ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr); 2086 ierr = DMPlexPartitionLabelClosure_Private(dm, label, rank, numPoints, points, &closureIS);CHKERRQ(ierr); 2087 ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr); 2088 ierr = ISDestroy(&pointIS);CHKERRQ(ierr); 2089 ierr = DMLabelSetStratumIS(label, rank, closureIS);CHKERRQ(ierr); 2090 ierr = ISDestroy(&closureIS);CHKERRQ(ierr); 2091 } 2092 ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr); 2093 ierr = ISDestroy(&rankIS);CHKERRQ(ierr); 2094 PetscFunctionReturn(0); 2095 } 2096 2097 /*@ 2098 DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label 2099 2100 Input Parameters: 2101 + dm - The DM 2102 - label - DMLabel assinging ranks to remote roots 2103 2104 Level: developer 2105 2106 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), 
DMPlexCreateOverlap 2107 @*/ 2108 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label) 2109 { 2110 IS rankIS, pointIS; 2111 const PetscInt *ranks, *points; 2112 PetscInt numRanks, numPoints, r, p, a, adjSize; 2113 PetscInt *adj = NULL; 2114 PetscErrorCode ierr; 2115 2116 PetscFunctionBegin; 2117 ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr); 2118 ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr); 2119 ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr); 2120 for (r = 0; r < numRanks; ++r) { 2121 const PetscInt rank = ranks[r]; 2122 2123 ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr); 2124 ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr); 2125 ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr); 2126 for (p = 0; p < numPoints; ++p) { 2127 adjSize = PETSC_DETERMINE; 2128 ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr); 2129 for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);} 2130 } 2131 ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr); 2132 ierr = ISDestroy(&pointIS);CHKERRQ(ierr); 2133 } 2134 ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr); 2135 ierr = ISDestroy(&rankIS);CHKERRQ(ierr); 2136 ierr = PetscFree(adj);CHKERRQ(ierr); 2137 PetscFunctionReturn(0); 2138 } 2139 2140 /*@ 2141 DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF 2142 2143 Input Parameters: 2144 + dm - The DM 2145 - label - DMLabel assinging ranks to remote roots 2146 2147 Level: developer 2148 2149 Note: This is required when generating multi-level overlaps to capture 2150 overlap points from non-neighbouring partitions. 
2151 2152 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap 2153 @*/ 2154 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label) 2155 { 2156 MPI_Comm comm; 2157 PetscMPIInt rank; 2158 PetscSF sfPoint; 2159 DMLabel lblRoots, lblLeaves; 2160 IS rankIS, pointIS; 2161 const PetscInt *ranks; 2162 PetscInt numRanks, r; 2163 PetscErrorCode ierr; 2164 2165 PetscFunctionBegin; 2166 ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr); 2167 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 2168 ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr); 2169 /* Pull point contributions from remote leaves into local roots */ 2170 ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr); 2171 ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr); 2172 ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr); 2173 ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr); 2174 for (r = 0; r < numRanks; ++r) { 2175 const PetscInt remoteRank = ranks[r]; 2176 if (remoteRank == rank) continue; 2177 ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr); 2178 ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr); 2179 ierr = ISDestroy(&pointIS);CHKERRQ(ierr); 2180 } 2181 ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr); 2182 ierr = ISDestroy(&rankIS);CHKERRQ(ierr); 2183 ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr); 2184 /* Push point contributions from roots into remote leaves */ 2185 ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr); 2186 ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr); 2187 ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr); 2188 ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr); 2189 for (r = 0; r < numRanks; ++r) { 2190 const PetscInt remoteRank = ranks[r]; 2191 if (remoteRank == rank) continue; 2192 ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr); 2193 ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr); 2194 ierr 
= ISDestroy(&pointIS);CHKERRQ(ierr); 2195 } 2196 ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr); 2197 ierr = ISDestroy(&rankIS);CHKERRQ(ierr); 2198 ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr); 2199 PetscFunctionReturn(0); 2200 } 2201 2202 /*@ 2203 DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label 2204 2205 Input Parameters: 2206 + dm - The DM 2207 . rootLabel - DMLabel assinging ranks to local roots 2208 . processSF - A star forest mapping into the local index on each remote rank 2209 2210 Output Parameter: 2211 - leafLabel - DMLabel assinging ranks to remote roots 2212 2213 Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The 2214 resulting leafLabel is a receiver mapping of remote roots to their parent rank. 2215 2216 Level: developer 2217 2218 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap 2219 @*/ 2220 PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel) 2221 { 2222 MPI_Comm comm; 2223 PetscMPIInt rank, size, r; 2224 PetscInt p, n, numNeighbors, numPoints, dof, off, rootSize, l, nleaves, leafSize; 2225 PetscSF sfPoint; 2226 PetscSection rootSection; 2227 PetscSFNode *rootPoints, *leafPoints; 2228 const PetscSFNode *remote; 2229 const PetscInt *local, *neighbors; 2230 IS valueIS; 2231 PetscBool mpiOverflow = PETSC_FALSE; 2232 PetscErrorCode ierr; 2233 2234 PetscFunctionBegin; 2235 ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr); 2236 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 2237 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 2238 ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr); 2239 2240 /* Convert to (point, rank) and use actual owners */ 2241 ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr); 2242 ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr); 2243 ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr); 2244 ierr = 
ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr); 2245 ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr); 2246 for (n = 0; n < numNeighbors; ++n) { 2247 ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr); 2248 ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr); 2249 } 2250 ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr); 2251 ierr = PetscSectionGetStorageSize(rootSection, &rootSize);CHKERRQ(ierr); 2252 ierr = PetscMalloc1(rootSize, &rootPoints);CHKERRQ(ierr); 2253 ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr); 2254 for (n = 0; n < numNeighbors; ++n) { 2255 IS pointIS; 2256 const PetscInt *points; 2257 2258 ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr); 2259 ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr); 2260 ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr); 2261 ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr); 2262 for (p = 0; p < numPoints; ++p) { 2263 if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);} 2264 else {l = -1;} 2265 if (l >= 0) {rootPoints[off+p] = remote[l];} 2266 else {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;} 2267 } 2268 ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr); 2269 ierr = ISDestroy(&pointIS);CHKERRQ(ierr); 2270 } 2271 2272 /* Try to communicate overlap using All-to-All */ 2273 if (!processSF) { 2274 PetscInt64 counter = 0; 2275 PetscBool locOverflow = PETSC_FALSE; 2276 PetscMPIInt *scounts, *sdispls, *rcounts, *rdispls; 2277 2278 ierr = PetscCalloc4(size, &scounts, size, &sdispls, size, &rcounts, size, &rdispls);CHKERRQ(ierr); 2279 for (n = 0; n < numNeighbors; ++n) { 2280 ierr = PetscSectionGetDof(rootSection, neighbors[n], &dof);CHKERRQ(ierr); 2281 ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr); 2282 #if defined(PETSC_USE_64BIT_INDICES) 2283 if (dof > PETSC_MPI_INT_MAX) 
{locOverflow = PETSC_TRUE; break;} 2284 if (off > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;} 2285 #endif 2286 scounts[neighbors[n]] = (PetscMPIInt) dof; 2287 sdispls[neighbors[n]] = (PetscMPIInt) off; 2288 } 2289 ierr = MPI_Alltoall(scounts, 1, MPI_INT, rcounts, 1, MPI_INT, comm);CHKERRQ(ierr); 2290 for (r = 0; r < size; ++r) { rdispls[r] = (int)counter; counter += rcounts[r]; } 2291 if (counter > PETSC_MPI_INT_MAX) locOverflow = PETSC_TRUE; 2292 ierr = MPI_Allreduce(&locOverflow, &mpiOverflow, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr); 2293 if (!mpiOverflow) { 2294 leafSize = (PetscInt) counter; 2295 ierr = PetscMalloc1(leafSize, &leafPoints);CHKERRQ(ierr); 2296 ierr = MPI_Alltoallv(rootPoints, scounts, sdispls, MPIU_2INT, leafPoints, rcounts, rdispls, MPIU_2INT, comm);CHKERRQ(ierr); 2297 } 2298 ierr = PetscFree4(scounts, sdispls, rcounts, rdispls);CHKERRQ(ierr); 2299 } 2300 2301 /* Communicate overlap using process star forest */ 2302 if (processSF || mpiOverflow) { 2303 PetscSF procSF; 2304 PetscSFNode *remote; 2305 PetscSection leafSection; 2306 2307 if (processSF) { 2308 ierr = PetscObjectReference((PetscObject)processSF);CHKERRQ(ierr); 2309 procSF = processSF; 2310 } else { 2311 ierr = PetscMalloc1(size, &remote);CHKERRQ(ierr); 2312 for (r = 0; r < size; ++r) { remote[r].rank = r; remote[r].index = rank; } 2313 ierr = PetscSFCreate(comm, &procSF);CHKERRQ(ierr); 2314 ierr = PetscSFSetGraph(procSF, size, size, NULL, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr); 2315 } 2316 2317 ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dm), &leafSection);CHKERRQ(ierr); 2318 ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr); 2319 ierr = PetscSectionGetStorageSize(leafSection, &leafSize);CHKERRQ(ierr); 2320 ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr); 2321 ierr = PetscSFDestroy(&procSF);CHKERRQ(ierr); 2322 } 2323 2324 for (p = 0; p < leafSize; p++) { 
2325 ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr); 2326 } 2327 2328 ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr); 2329 ierr = ISDestroy(&valueIS);CHKERRQ(ierr); 2330 ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr); 2331 ierr = PetscFree(rootPoints);CHKERRQ(ierr); 2332 ierr = PetscFree(leafPoints);CHKERRQ(ierr); 2333 PetscFunctionReturn(0); 2334 } 2335 2336 /*@ 2337 DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points 2338 2339 Input Parameters: 2340 + dm - The DM 2341 . label - DMLabel assinging ranks to remote roots 2342 2343 Output Parameter: 2344 - sf - The star forest communication context encapsulating the defined mapping 2345 2346 Note: The incoming label is a receiver mapping of remote points to their parent rank. 2347 2348 Level: developer 2349 2350 .seealso: DMPlexDistribute(), DMPlexCreateOverlap 2351 @*/ 2352 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf) 2353 { 2354 PetscMPIInt rank, size; 2355 PetscInt n, numRemote, p, numPoints, pStart, pEnd, idx = 0; 2356 PetscSFNode *remotePoints; 2357 IS remoteRootIS; 2358 const PetscInt *remoteRoots; 2359 PetscErrorCode ierr; 2360 2361 PetscFunctionBegin; 2362 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr); 2363 ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr); 2364 2365 for (numRemote = 0, n = 0; n < size; ++n) { 2366 ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr); 2367 numRemote += numPoints; 2368 } 2369 ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr); 2370 /* Put owned points first */ 2371 ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr); 2372 if (numPoints > 0) { 2373 ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr); 2374 ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr); 2375 for (p = 0; p < numPoints; p++) { 2376 remotePoints[idx].index 
= remoteRoots[p]; 2377 remotePoints[idx].rank = rank; 2378 idx++; 2379 } 2380 ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr); 2381 ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr); 2382 } 2383 /* Now add remote points */ 2384 for (n = 0; n < size; ++n) { 2385 ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr); 2386 if (numPoints <= 0 || n == rank) continue; 2387 ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr); 2388 ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr); 2389 for (p = 0; p < numPoints; p++) { 2390 remotePoints[idx].index = remoteRoots[p]; 2391 remotePoints[idx].rank = n; 2392 idx++; 2393 } 2394 ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr); 2395 ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr); 2396 } 2397 ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr); 2398 ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr); 2399 ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr); 2400 PetscFunctionReturn(0); 2401 } 2402