1 #include <petsc/private/dmpleximpl.h> /*I "petscdmplex.h" I*/ 2 #include <petsc/private/hashseti.h> 3 4 PetscClassId PETSCPARTITIONER_CLASSID = 0; 5 6 PetscFunctionList PetscPartitionerList = NULL; 7 PetscBool PetscPartitionerRegisterAllCalled = PETSC_FALSE; 8 9 PetscBool ChacoPartitionercite = PETSC_FALSE; 10 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n" 11 " author = {Bruce Hendrickson and Robert Leland},\n" 12 " title = {A multilevel algorithm for partitioning graphs},\n" 13 " booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)}," 14 " isbn = {0-89791-816-9},\n" 15 " pages = {28},\n" 16 " doi = {http://doi.acm.org/10.1145/224170.224228},\n" 17 " publisher = {ACM Press},\n" 18 " address = {New York},\n" 19 " year = {1995}\n}\n"; 20 21 PetscBool ParMetisPartitionercite = PETSC_FALSE; 22 const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n" 23 " author = {George Karypis and Vipin Kumar},\n" 24 " title = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n" 25 " journal = {Journal of Parallel and Distributed Computing},\n" 26 " volume = {48},\n" 27 " pages = {71--85},\n" 28 " year = {1998}\n}\n"; 29 30 /*@C 31 DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner 32 33 Input Parameters: 34 + dm - The mesh DM dm 35 - height - Height of the strata from which to construct the graph 36 37 Output Parameter: 38 + numVertices - Number of vertices in the graph 39 . offsets - Point offsets in the graph 40 . adjacency - Point connectivity in the graph 41 - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency". Negative indicates that the cell is a duplicate from another process. 42 43 The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function 44 representation on the mesh. 45 46 Level: developer 47 48 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMSetAdjacency() 49 @*/ 50 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering) 51 { 52 PetscInt dim, depth, p, pStart, pEnd, a, adjSize, idx, size; 53 PetscInt *adj = NULL, *vOffsets = NULL, *graph = NULL; 54 IS cellNumbering; 55 const PetscInt *cellNum; 56 PetscBool useCone, useClosure; 57 PetscSection section; 58 PetscSegBuffer adjBuffer; 59 PetscSF sfPoint; 60 PetscInt *adjCells = NULL, *remoteCells = NULL; 61 const PetscInt *local; 62 PetscInt nroots, nleaves, l; 63 PetscMPIInt rank; 64 PetscErrorCode ierr; 65 66 PetscFunctionBegin; 67 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr); 68 ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr); 69 ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr); 70 if (dim != depth) { 71 /* We do not handle the uninterpolated case here */ 72 ierr = DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);CHKERRQ(ierr); 73 /* DMPlexCreateNeighborCSR does not make a numbering */ 74 if (globalNumbering) {ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);CHKERRQ(ierr);} 75 /* Different behavior for empty graphs */ 76 if (!*numVertices) { 77 ierr = PetscMalloc1(1, offsets);CHKERRQ(ierr); 78 (*offsets)[0] = 0; 79 } 80 /* Broken in parallel */ 81 if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported"); 82 PetscFunctionReturn(0); 83 } 84 ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr); 85 ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr); 86 ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr); 87 /* Build adjacency graph via a section/segbuffer */ 88 ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), §ion);CHKERRQ(ierr); 89 ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr); 90 ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr); 91 /* Always use FVM adjacency to create partitioner graph */ 92 ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr); 93 ierr = DMSetBasicAdjacency(dm, PETSC_TRUE, PETSC_FALSE);CHKERRQ(ierr); 94 ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);CHKERRQ(ierr); 95 if (globalNumbering) { 96 ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr); 97 *globalNumbering = cellNumbering; 98 } 99 ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr); 100 /* For all boundary faces (including faces adjacent to a ghost cell), record the local cell in adjCells 101 Broadcast adjCells to remoteCells (to get cells from roots) and Reduce adjCells to remoteCells (to get cells from leaves) 102 */ 103 ierr = PetscSFGetGraph(dm->sf, &nroots, &nleaves, &local, NULL);CHKERRQ(ierr); 104 if (nroots >= 0) { 105 PetscInt fStart, fEnd, f; 106 107 ierr = PetscCalloc2(nroots, &adjCells, nroots, &remoteCells);CHKERRQ(ierr); 108 ierr = DMPlexGetHeightStratum(dm, 1, &fStart, &fEnd);CHKERRQ(ierr); 109 for (l = 0; l < nroots; ++l) adjCells[l] = -3; 110 for (f = fStart; f < fEnd; ++f) { 111 const PetscInt *support; 112 PetscInt supportSize; 113 114 ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr); 115 ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr); 116 if (supportSize == 1) adjCells[f] = cellNum[support[0]]; 117 else if (supportSize == 2) { 118 ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr); 119 if (p >= 0) adjCells[f] = cellNum[support[1]]; 120 ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr); 121 if (p >= 0) adjCells[f] = cellNum[support[0]]; 122 } 123 } 124 for (l = 0; l < nroots; ++l) remoteCells[l] = -1; 125 ierr = PetscSFBcastBegin(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr); 126 ierr = PetscSFBcastEnd(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr); 127 ierr = PetscSFReduceBegin(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr); 128 ierr = PetscSFReduceEnd(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr); 129 } 130 /* Combine local and global adjacencies */ 131 for (*numVertices = 0, p = pStart; p < pEnd; p++) { 132 const PetscInt *cone; 133 PetscInt coneSize, c; 134 135 /* Skip non-owned cells in parallel (ParMetis expects no overlap) */ 136 if (nroots > 0) {if (cellNum[p] < 0) continue;} 137 /* Add remote cells */ 138 if (remoteCells) { 139 ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr); 140 ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr); 141 for (c = 0; c < coneSize; ++c) { 142 if (remoteCells[cone[c]] != -1) { 143 PetscInt *PETSC_RESTRICT pBuf; 144 145 ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr); 146 ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr); 147 *pBuf = remoteCells[cone[c]]; 148 } 149 } 150 } 151 /* Add local cells */ 152 adjSize = PETSC_DETERMINE; 153 ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr); 154 for (a = 0; a < adjSize; ++a) { 155 const PetscInt point = adj[a]; 156 if (point != p && pStart <= point && point < pEnd) { 157 PetscInt *PETSC_RESTRICT pBuf; 158 ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr); 159 ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr); 160 *pBuf = cellNum[point]; 161 } 162 } 163 (*numVertices)++; 164 } 165 ierr = PetscFree2(adjCells, remoteCells);CHKERRQ(ierr); 166 ierr = DMSetBasicAdjacency(dm, useCone, useClosure);CHKERRQ(ierr); 167 /* Derive CSR graph from section/segbuffer */ 168 ierr = PetscSectionSetUp(section);CHKERRQ(ierr); 169 ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr); 170 ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr); 171 for (idx = 0, p = pStart; p < pEnd; p++) { 172 if (nroots > 0) {if (cellNum[p] < 0) continue;} 173 ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr); 174 } 175 vOffsets[*numVertices] = size; 176 if (offsets) *offsets = vOffsets; 177 ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr); 178 ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr); 179 ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr); 180 if (adjacency) *adjacency = graph; 181 /* Clean up */ 182 ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr); 183 ierr = PetscSectionDestroy(§ion);CHKERRQ(ierr); 184 ierr = PetscFree(adj);CHKERRQ(ierr); 185 PetscFunctionReturn(0); 186 } 187 188 /*@C 189 DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format. 190 191 Collective 192 193 Input Arguments: 194 + dm - The DMPlex 195 - cellHeight - The height of mesh points to treat as cells (default should be 0) 196 197 Output Arguments: 198 + numVertices - The number of local vertices in the graph, or cells in the mesh. 199 . offsets - The offset to the adjacency list for each cell 200 - adjacency - The adjacency list for all cells 201 202 Note: This is suitable for input to a mesh partitioner like ParMetis. 203 204 Level: advanced 205 206 .seealso: DMPlexCreate() 207 @*/ 208 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency) 209 { 210 const PetscInt maxFaceCases = 30; 211 PetscInt numFaceCases = 0; 212 PetscInt numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */ 213 PetscInt *off, *adj; 214 PetscInt *neighborCells = NULL; 215 PetscInt dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell; 216 PetscErrorCode ierr; 217 218 PetscFunctionBegin; 219 /* For parallel partitioning, I think you have to communicate supports */ 220 ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr); 221 cellDim = dim - cellHeight; 222 ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr); 223 ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr); 224 if (cEnd - cStart == 0) { 225 if (numVertices) *numVertices = 0; 226 if (offsets) *offsets = NULL; 227 if (adjacency) *adjacency = NULL; 228 PetscFunctionReturn(0); 229 } 230 numCells = cEnd - cStart; 231 faceDepth = depth - cellHeight; 232 if (dim == depth) { 233 PetscInt f, fStart, fEnd; 234 235 ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr); 236 /* Count neighboring cells */ 237 ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr); 238 for (f = fStart; f < fEnd; ++f) { 239 const PetscInt *support; 240 PetscInt supportSize; 241 ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr); 242 ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr); 243 if (supportSize == 2) { 244 PetscInt numChildren; 245 246 ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr); 247 if (!numChildren) { 248 ++off[support[0]-cStart+1]; 249 ++off[support[1]-cStart+1]; 250 } 251 } 252 } 253 /* Prefix sum */ 254 for (c = 1; c <= numCells; ++c) off[c] += off[c-1]; 255 if (adjacency) { 256 PetscInt *tmp; 257 258 ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr); 259 ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr); 260 ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr); 261 /* Get neighboring cells */ 262 for (f = fStart; f < fEnd; ++f) { 263 const PetscInt *support; 264 PetscInt supportSize; 265 ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr); 266 ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr); 267 if (supportSize == 2) { 268 PetscInt numChildren; 269 270 ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr); 271 if (!numChildren) { 272 adj[tmp[support[0]-cStart]++] = support[1]; 273 adj[tmp[support[1]-cStart]++] = support[0]; 274 } 275 } 276 } 277 #if defined(PETSC_USE_DEBUG) 278 for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart); 279 #endif 280 ierr = PetscFree(tmp);CHKERRQ(ierr); 281 } 282 if (numVertices) *numVertices = numCells; 283 if (offsets) *offsets = off; 284 if (adjacency) *adjacency = adj; 285 PetscFunctionReturn(0); 286 } 287 /* Setup face recognition */ 288 if (faceDepth == 1) { 289 PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */ 290 291 for (c = cStart; c < cEnd; ++c) { 292 PetscInt corners; 293 294 ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr); 295 if (!cornersSeen[corners]) { 296 PetscInt nFV; 297 298 if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases"); 299 cornersSeen[corners] = 1; 300 301 ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr); 302 303 numFaceVertices[numFaceCases++] = nFV; 304 } 305 } 306 } 307 ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr); 308 /* Count neighboring cells */ 309 for (cell = cStart; cell < cEnd; ++cell) { 310 PetscInt numNeighbors = PETSC_DETERMINE, n; 311 312 ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr); 313 /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */ 314 for (n = 0; n < numNeighbors; ++n) { 315 PetscInt cellPair[2]; 316 PetscBool found = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE; 317 PetscInt meetSize = 0; 318 const PetscInt *meet = NULL; 319 320 cellPair[0] = cell; cellPair[1] = neighborCells[n]; 321 if (cellPair[0] == cellPair[1]) continue; 322 if (!found) { 323 ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr); 324 if (meetSize) { 325 PetscInt f; 326 327 for (f = 0; f < numFaceCases; ++f) { 328 if (numFaceVertices[f] == meetSize) { 329 found = PETSC_TRUE; 330 break; 331 } 332 } 333 } 334 ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr); 335 } 336 if (found) ++off[cell-cStart+1]; 337 } 338 } 339 /* Prefix sum */ 340 for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1]; 341 342 if (adjacency) { 343 ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr); 344 /* Get neighboring cells */ 345 for (cell = cStart; cell < cEnd; ++cell) { 346 PetscInt numNeighbors = PETSC_DETERMINE, n; 347 PetscInt cellOffset = 0; 348 349 ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr); 350 /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */ 351 for (n = 0; n < numNeighbors; ++n) { 352 PetscInt cellPair[2]; 353 PetscBool found = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE; 354 PetscInt meetSize = 0; 355 const PetscInt *meet = NULL; 356 357 cellPair[0] = cell; cellPair[1] = neighborCells[n]; 358 if (cellPair[0] == cellPair[1]) continue; 359 if (!found) { 360 ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr); 361 if (meetSize) { 362 PetscInt f; 363 364 for (f = 0; f < numFaceCases; ++f) { 365 if (numFaceVertices[f] == meetSize) { 366 found = PETSC_TRUE; 367 break; 368 } 369 } 370 } 371 ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr); 372 } 373 if (found) { 374 adj[off[cell-cStart]+cellOffset] = neighborCells[n]; 375 ++cellOffset; 376 } 377 } 378 } 379 } 380 ierr = PetscFree(neighborCells);CHKERRQ(ierr); 381 if (numVertices) *numVertices = numCells; 382 if (offsets) *offsets = off; 383 if (adjacency) *adjacency = adj; 384 PetscFunctionReturn(0); 385 } 386 387 /*@C 388 PetscPartitionerRegister - Adds a new PetscPartitioner implementation 389 390 Not Collective 391 392 Input Parameters: 393 + name - The name of a new user-defined creation routine 394 - create_func - The creation routine itself 395 396 Notes: 397 PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners 398 399 Sample usage: 400 .vb 401 PetscPartitionerRegister("my_part", MyPetscPartitionerCreate); 402 .ve 403 404 Then, your PetscPartitioner type can be chosen with the procedural interface via 405 .vb 406 PetscPartitionerCreate(MPI_Comm, PetscPartitioner *); 407 PetscPartitionerSetType(PetscPartitioner, "my_part"); 408 .ve 409 or at runtime via the option 410 .vb 411 -petscpartitioner_type my_part 412 .ve 413 414 Level: advanced 415 416 .keywords: PetscPartitioner, register 417 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy() 418 419 @*/ 420 PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner)) 421 { 422 PetscErrorCode ierr; 423 424 PetscFunctionBegin; 425 ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr); 426 PetscFunctionReturn(0); 427 } 428 429 /*@C 430 PetscPartitionerSetType - Builds a particular PetscPartitioner 431 432 Collective on PetscPartitioner 433 434 Input Parameters: 435 + part - The PetscPartitioner object 436 - name - The kind of partitioner 437 438 Options Database Key: 439 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types 440 441 Level: intermediate 442 443 .keywords: PetscPartitioner, set, type 444 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate() 445 @*/ 446 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name) 447 { 448 PetscErrorCode (*r)(PetscPartitioner); 449 PetscBool match; 450 PetscErrorCode ierr; 451 452 PetscFunctionBegin; 453 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 454 ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr); 455 if (match) PetscFunctionReturn(0); 456 457 ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr); 458 ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr); 459 if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name); 460 461 if (part->ops->destroy) { 462 ierr = (*part->ops->destroy)(part);CHKERRQ(ierr); 463 } 464 ierr = PetscMemzero(part->ops, sizeof(struct _PetscPartitionerOps));CHKERRQ(ierr); 465 ierr = (*r)(part);CHKERRQ(ierr); 466 ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr); 467 PetscFunctionReturn(0); 468 } 469 470 /*@C 471 PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object. 472 473 Not Collective 474 475 Input Parameter: 476 . part - The PetscPartitioner 477 478 Output Parameter: 479 . name - The PetscPartitioner type name 480 481 Level: intermediate 482 483 .keywords: PetscPartitioner, get, type, name 484 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate() 485 @*/ 486 PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name) 487 { 488 PetscErrorCode ierr; 489 490 PetscFunctionBegin; 491 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 492 PetscValidPointer(name, 2); 493 ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr); 494 *name = ((PetscObject) part)->type_name; 495 PetscFunctionReturn(0); 496 } 497 498 /*@C 499 PetscPartitionerView - Views a PetscPartitioner 500 501 Collective on PetscPartitioner 502 503 Input Parameter: 504 + part - the PetscPartitioner object to view 505 - v - the viewer 506 507 Level: developer 508 509 .seealso: PetscPartitionerDestroy() 510 @*/ 511 PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v) 512 { 513 PetscMPIInt size; 514 PetscBool isascii; 515 PetscErrorCode ierr; 516 517 PetscFunctionBegin; 518 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 519 if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);} 520 ierr = PetscObjectTypeCompare((PetscObject) v, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr); 521 if (isascii) { 522 ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr); 523 ierr = PetscViewerASCIIPrintf(v, "Graph Partitioner: %d MPI Process%s\n", size, size > 1 ? "es" : "");CHKERRQ(ierr); 524 ierr = PetscViewerASCIIPrintf(v, " type: %s\n", part->hdr.type_name);CHKERRQ(ierr); 525 ierr = PetscViewerASCIIPushTab(v);CHKERRQ(ierr); 526 ierr = PetscViewerASCIIPrintf(v, "edge cut: %D\n", part->edgeCut);CHKERRQ(ierr); 527 ierr = PetscViewerASCIIPrintf(v, "balance: %.2g\n", part->balance);CHKERRQ(ierr); 528 ierr = PetscViewerASCIIPopTab(v);CHKERRQ(ierr); 529 } 530 if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);} 531 PetscFunctionReturn(0); 532 } 533 534 static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType) 535 { 536 PetscFunctionBegin; 537 if (!currentType) { 538 #if defined(PETSC_HAVE_CHACO) 539 *defaultType = PETSCPARTITIONERCHACO; 540 #elif defined(PETSC_HAVE_PARMETIS) 541 *defaultType = PETSCPARTITIONERPARMETIS; 542 #elif defined(PETSC_HAVE_PTSCOTCH) 543 *defaultType = PETSCPARTITIONERPTSCOTCH; 544 #else 545 *defaultType = PETSCPARTITIONERSIMPLE; 546 #endif 547 } else { 548 *defaultType = currentType; 549 } 550 PetscFunctionReturn(0); 551 } 552 553 /*@ 554 PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database 555 556 Collective on PetscPartitioner 557 558 Input Parameter: 559 . part - the PetscPartitioner object to set options for 560 561 Level: developer 562 563 .seealso: PetscPartitionerView() 564 @*/ 565 PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part) 566 { 567 const char *defaultType; 568 char name[256]; 569 PetscBool flg; 570 PetscErrorCode ierr; 571 572 PetscFunctionBegin; 573 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 574 ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr); 575 ierr = PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);CHKERRQ(ierr); 576 ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr); 577 ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, sizeof(name), &flg);CHKERRQ(ierr); 578 if (flg) { 579 ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr); 580 } else if (!((PetscObject) part)->type_name) { 581 ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr); 582 } 583 if (part->ops->setfromoptions) { 584 ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr); 585 } 586 ierr = PetscOptionsGetViewer(((PetscObject) part)->comm, ((PetscObject) part)->options, ((PetscObject) part)->prefix, "-petscpartitioner_view_graph", &part->viewerGraph, &part->formatGraph, &part->viewGraph);CHKERRQ(ierr); 587 /* process any options handlers added with PetscObjectAddOptionsHandler() */ 588 ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr); 589 ierr = PetscOptionsEnd();CHKERRQ(ierr); 590 PetscFunctionReturn(0); 591 } 592 593 /*@C 594 PetscPartitionerSetUp - Construct data structures for the PetscPartitioner 595 596 Collective on PetscPartitioner 597 598 Input Parameter: 599 . part - the PetscPartitioner object to setup 600 601 Level: developer 602 603 .seealso: PetscPartitionerView(), PetscPartitionerDestroy() 604 @*/ 605 PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part) 606 { 607 PetscErrorCode ierr; 608 609 PetscFunctionBegin; 610 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 611 if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);} 612 PetscFunctionReturn(0); 613 } 614 615 /*@ 616 PetscPartitionerDestroy - Destroys a PetscPartitioner object 617 618 Collective on PetscPartitioner 619 620 Input Parameter: 621 . part - the PetscPartitioner object to destroy 622 623 Level: developer 624 625 .seealso: PetscPartitionerView() 626 @*/ 627 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part) 628 { 629 PetscErrorCode ierr; 630 631 PetscFunctionBegin; 632 if (!*part) PetscFunctionReturn(0); 633 PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1); 634 635 if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);} 636 ((PetscObject) (*part))->refct = 0; 637 638 ierr = PetscViewerDestroy(&(*part)->viewerGraph);CHKERRQ(ierr); 639 if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);} 640 ierr = PetscHeaderDestroy(part);CHKERRQ(ierr); 641 PetscFunctionReturn(0); 642 } 643 644 /*@ 645 PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType(). 646 647 Collective on MPI_Comm 648 649 Input Parameter: 650 . comm - The communicator for the PetscPartitioner object 651 652 Output Parameter: 653 . part - The PetscPartitioner object 654 655 Level: beginner 656 657 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER 658 @*/ 659 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part) 660 { 661 PetscPartitioner p; 662 const char *partitionerType = NULL; 663 PetscErrorCode ierr; 664 665 PetscFunctionBegin; 666 PetscValidPointer(part, 2); 667 *part = NULL; 668 ierr = DMInitializePackage();CHKERRQ(ierr); 669 670 ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr); 671 ierr = PetscPartitionerGetDefaultType(NULL,&partitionerType);CHKERRQ(ierr); 672 ierr = PetscPartitionerSetType(p,partitionerType);CHKERRQ(ierr); 673 674 p->edgeCut = 0; 675 p->balance = 0.0; 676 677 *part = p; 678 PetscFunctionReturn(0); 679 } 680 681 /*@ 682 PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh 683 684 Collective on DM 685 686 Input Parameters: 687 + part - The PetscPartitioner 688 - dm - The mesh DM 689 690 Output Parameters: 691 + partSection - The PetscSection giving the division of points by partition 692 - partition - The list of points by partition 693 694 Options Database: 695 . -petscpartitioner_view_graph - View the graph we are partitioning 696 697 Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight() 698 699 Level: developer 700 701 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate() 702 @*/ 703 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition) 704 { 705 PetscMPIInt size; 706 PetscErrorCode ierr; 707 708 PetscFunctionBegin; 709 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 710 PetscValidHeaderSpecific(dm, DM_CLASSID, 2); 711 PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4); 712 PetscValidPointer(partition, 5); 713 ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr); 714 if (size == 1) { 715 PetscInt *points; 716 PetscInt cStart, cEnd, c; 717 718 ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr); 719 ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr); 720 ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr); 721 ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr); 722 ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr); 723 for (c = cStart; c < cEnd; ++c) points[c] = c; 724 ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr); 725 PetscFunctionReturn(0); 726 } 727 if (part->height == 0) { 728 PetscInt numVertices; 729 PetscInt *start = NULL; 730 PetscInt *adjacency = NULL; 731 IS globalNumbering; 732 733 ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr); 734 if (part->viewGraph) { 735 PetscViewer viewer = part->viewerGraph; 736 PetscBool isascii; 737 PetscInt v, i; 738 PetscMPIInt rank; 739 740 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) viewer), &rank);CHKERRQ(ierr); 741 ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr); 742 if (isascii) { 743 ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr); 744 ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]Nv: %D\n", rank, numVertices);CHKERRQ(ierr); 745 for (v = 0; v < numVertices; ++v) { 746 const PetscInt s = start[v]; 747 const PetscInt e = start[v+1]; 748 749 ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d] ", rank);CHKERRQ(ierr); 750 for (i = s; i < e; ++i) {ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%D ", adjacency[i]);CHKERRQ(ierr);} 751 ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%D-%D)\n", s, e);CHKERRQ(ierr); 752 } 753 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 754 ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr); 755 } 756 } 757 if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type"); 758 ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr); 759 ierr = PetscFree(start);CHKERRQ(ierr); 760 ierr = PetscFree(adjacency);CHKERRQ(ierr); 761 if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */ 762 const PetscInt *globalNum; 763 const PetscInt *partIdx; 764 PetscInt *map, cStart, cEnd; 765 PetscInt *adjusted, i, localSize, offset; 766 IS newPartition; 767 768 ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr); 769 ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr); 770 ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr); 771 ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr); 772 ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr); 773 ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr); 774 for (i = cStart, offset = 0; i < cEnd; i++) { 775 if (globalNum[i - cStart] >= 0) map[offset++] = i; 776 } 777 for (i = 0; i < localSize; i++) { 778 adjusted[i] = map[partIdx[i]]; 779 } 780 ierr = PetscFree(map);CHKERRQ(ierr); 781 ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr); 782 ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr); 783 ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr); 784 ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr); 785 ierr = ISDestroy(partition);CHKERRQ(ierr); 786 *partition = newPartition; 787 } 788 } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height); 789 ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr); 790 PetscFunctionReturn(0); 791 } 792 793 static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part) 794 { 795 PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data; 796 PetscErrorCode ierr; 797 798 PetscFunctionBegin; 799 ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr); 800 ierr = ISDestroy(&p->partition);CHKERRQ(ierr); 801 ierr = PetscFree(p);CHKERRQ(ierr); 802 PetscFunctionReturn(0); 803 } 804 805 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer) 806 { 807 PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data; 808 PetscErrorCode ierr; 809 810 PetscFunctionBegin; 811 if (p->random) { 812 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 813 ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr); 814 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 815 } 816 PetscFunctionReturn(0); 817 } 818 819 static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer) 820 { 821 PetscBool iascii; 822 PetscErrorCode ierr; 823 824 PetscFunctionBegin; 825 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 826 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2); 827 ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr); 828 if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);} 829 PetscFunctionReturn(0); 830 } 831 832 static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part) 833 { 834 PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data; 835 PetscErrorCode ierr; 836 837 PetscFunctionBegin; 838 ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner Shell Options");CHKERRQ(ierr); 839 ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr); 840 ierr = PetscOptionsTail();CHKERRQ(ierr); 841 PetscFunctionReturn(0); 842 } 843 844 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition) 845 { 846 PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data; 847 PetscInt np; 848 PetscErrorCode ierr; 849 850 PetscFunctionBegin; 851 if (p->random) { 852 PetscRandom r; 853 PetscInt *sizes, *points, v, p; 854 PetscMPIInt rank; 855 856 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr); 857 ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr); 858 ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr); 859 ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr); 860 ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr); 861 for (v = 0; v < numVertices; ++v) {points[v] = v;} 862 for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);} 863 for (v = numVertices-1; v > 0; --v) { 864 PetscReal val; 865 PetscInt w, tmp; 866 867 ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));CHKERRQ(ierr); 868 ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr); 869 w = PetscFloorReal(val); 870 tmp = points[v]; 871 points[v] = points[w]; 872 points[w] = tmp; 873 } 874 ierr = PetscRandomDestroy(&r);CHKERRQ(ierr); 875 ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr); 876 ierr = PetscFree2(sizes, points);CHKERRQ(ierr); 877 } 878 if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()"); 879 ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr); 880 if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np); 881 ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr); 882 if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np); 883 ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr); 884 *partition = p->partition; 885 ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr); 886 PetscFunctionReturn(0); 887 } 888 889 static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part) 890 { 891 PetscFunctionBegin; 892 part->ops->view = PetscPartitionerView_Shell; 893 part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell; 894 part->ops->destroy = PetscPartitionerDestroy_Shell; 895 part->ops->partition = PetscPartitionerPartition_Shell; 896 PetscFunctionReturn(0); 897 } 898 899 /*MC 900 PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object 901 902 Level: intermediate 903 904 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType() 905 M*/ 906 907 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part) 908 { 909 PetscPartitioner_Shell *p; 910 PetscErrorCode ierr; 911 912 PetscFunctionBegin; 913 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 914 ierr = PetscNewLog(part, &p);CHKERRQ(ierr); 915 part->data = p; 916 917 ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr); 918 p->random = PETSC_FALSE; 919 PetscFunctionReturn(0); 920 } 921 922 /*@C 923 PetscPartitionerShellSetPartition - Set an artifical partition for a mesh 924 925 Collective on Part 926 927 Input Parameters: 928 + part - The PetscPartitioner 929 . size - The number of partitions 930 . sizes - array of size size (or NULL) providing the number of points in each partition 931 - points - array of size sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.) 932 933 Level: developer 934 935 Notes: 936 937 It is safe to free the sizes and points arrays after use in this routine. 938 939 .seealso DMPlexDistribute(), PetscPartitionerCreate() 940 @*/ 941 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[]) 942 { 943 PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data; 944 PetscInt proc, numPoints; 945 PetscErrorCode ierr; 946 947 PetscFunctionBegin; 948 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 949 if (sizes) {PetscValidPointer(sizes, 3);} 950 if (points) {PetscValidPointer(points, 4);} 951 ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr); 952 ierr = ISDestroy(&p->partition);CHKERRQ(ierr); 953 ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr); 954 ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr); 955 if (sizes) { 956 for (proc = 0; proc < size; ++proc) { 957 ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr); 958 } 959 } 960 ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr); 961 ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr); 962 ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr); 963 PetscFunctionReturn(0); 964 } 965 966 /*@ 967 PetscPartitionerShellSetRandom - Set the flag to use a random partition 968 969 Collective on Part 970 971 Input Parameters: 972 + part - The PetscPartitioner 973 - random - The flag to use a random partition 974 975 Level: intermediate 976 977 .seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate() 978 @*/ 979 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random) 980 { 981 PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data; 982 983 PetscFunctionBegin; 984 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 985 p->random = random; 986 PetscFunctionReturn(0); 987 } 988 989 /*@ 990 PetscPartitionerShellGetRandom - get the flag to use a random partition 991 992 Collective on Part 993 994 Input Parameter: 995 . part - The PetscPartitioner 996 997 Output Parameter 998 . random - The flag to use a random partition 999 1000 Level: intermediate 1001 1002 .seealso PetscPartitionerShellSetRandom(), PetscPartitionerCreate() 1003 @*/ 1004 PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random) 1005 { 1006 PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data; 1007 1008 PetscFunctionBegin; 1009 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 1010 PetscValidPointer(random, 2); 1011 *random = p->random; 1012 PetscFunctionReturn(0); 1013 } 1014 1015 static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part) 1016 { 1017 PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data; 1018 PetscErrorCode ierr; 1019 1020 PetscFunctionBegin; 1021 ierr = PetscFree(p);CHKERRQ(ierr); 1022 PetscFunctionReturn(0); 1023 } 1024 1025 static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer) 1026 { 1027 PetscFunctionBegin; 1028 PetscFunctionReturn(0); 1029 } 1030 1031 static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer) 1032 { 1033 PetscBool iascii; 1034 PetscErrorCode ierr; 1035 1036 PetscFunctionBegin; 1037 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 1038 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2); 1039 ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr); 1040 if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);} 1041 PetscFunctionReturn(0); 1042 } 1043 1044 static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition) 1045 { 1046 MPI_Comm comm; 1047 PetscInt np; 1048 PetscMPIInt size; 1049 PetscErrorCode ierr; 1050 1051 PetscFunctionBegin; 1052 comm = PetscObjectComm((PetscObject)dm); 1053 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 1054 ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr); 1055 ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr); 1056 if (size == 1) { 1057 for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);} 1058 } 1059 else { 1060 PetscMPIInt rank; 1061 PetscInt nvGlobal, *offsets, myFirst, myLast; 1062 1063 ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr); 1064 offsets[0] = 0; 1065 ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr); 1066 for (np = 2; np <= size; np++) { 1067 offsets[np] += offsets[np-1]; 1068 } 1069 nvGlobal = offsets[size]; 1070 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 1071 myFirst = offsets[rank]; 1072 myLast = offsets[rank + 1] - 1; 1073 ierr = PetscFree(offsets);CHKERRQ(ierr); 1074 if (numVertices) { 1075 PetscInt firstPart = 0, firstLargePart = 0; 1076 PetscInt lastPart = 0, lastLargePart = 0; 1077 PetscInt rem = nvGlobal % nparts; 1078 PetscInt pSmall = nvGlobal/nparts; 1079 PetscInt pBig = nvGlobal/nparts + 1; 1080 1081 1082 if (rem) { 1083 firstLargePart = myFirst / pBig; 1084 lastLargePart = myLast / pBig; 1085 1086 if (firstLargePart < rem) { 1087 firstPart = firstLargePart; 1088 } 1089 else { 1090 firstPart = rem + (myFirst - (rem * pBig)) / pSmall; 1091 } 1092 if (lastLargePart < rem) { 1093 lastPart = lastLargePart; 1094 } 1095 else { 1096 lastPart = rem + (myLast - (rem * pBig)) / pSmall; 1097 } 1098 } 1099 else { 1100 firstPart = myFirst / (nvGlobal/nparts); 1101 lastPart = myLast / (nvGlobal/nparts); 1102 } 1103 1104 for (np = firstPart; np <= lastPart; np++) { 1105 PetscInt PartStart = np * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np); 1106 PetscInt PartEnd = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1); 1107 1108 PartStart = PetscMax(PartStart,myFirst); 1109 PartEnd = PetscMin(PartEnd,myLast+1); 1110 ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr); 1111 } 1112 } 1113 } 1114 ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr); 1115 PetscFunctionReturn(0); 1116 } 1117 1118 static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part) 1119 { 1120 PetscFunctionBegin; 1121 part->ops->view = PetscPartitionerView_Simple; 1122 part->ops->destroy = PetscPartitionerDestroy_Simple; 1123 part->ops->partition = PetscPartitionerPartition_Simple; 1124 PetscFunctionReturn(0); 1125 } 1126 1127 /*MC 1128 PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object 1129 1130 Level: intermediate 1131 1132 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType() 1133 M*/ 1134 1135 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part) 1136 { 1137 PetscPartitioner_Simple *p; 1138 PetscErrorCode ierr; 1139 1140 PetscFunctionBegin; 1141 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 1142 ierr = PetscNewLog(part, &p);CHKERRQ(ierr); 1143 part->data = p; 1144 1145 ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr); 1146 PetscFunctionReturn(0); 1147 } 1148 1149 static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part) 1150 { 1151 PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data; 1152 PetscErrorCode ierr; 1153 1154 PetscFunctionBegin; 1155 ierr = PetscFree(p);CHKERRQ(ierr); 1156 PetscFunctionReturn(0); 1157 } 1158 1159 static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer) 1160 { 1161 PetscFunctionBegin; 1162 PetscFunctionReturn(0); 1163 } 1164 1165 static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer) 1166 { 1167 PetscBool iascii; 1168 PetscErrorCode ierr; 1169 1170 PetscFunctionBegin; 1171 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 1172 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2); 1173 ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr); 1174 if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);} 1175 PetscFunctionReturn(0); 1176 } 1177 1178 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition) 1179 { 1180 PetscInt np; 1181 PetscErrorCode ierr; 1182 1183 PetscFunctionBegin; 1184 ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr); 1185 ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr); 1186 ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr); 1187 for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);} 1188 ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr); 1189 PetscFunctionReturn(0); 1190 } 1191 1192 static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part) 1193 { 1194 PetscFunctionBegin; 1195 part->ops->view = PetscPartitionerView_Gather; 1196 part->ops->destroy = PetscPartitionerDestroy_Gather; 1197 part->ops->partition = PetscPartitionerPartition_Gather; 1198 PetscFunctionReturn(0); 1199 } 1200 1201 /*MC 1202 PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object 1203 1204 Level: intermediate 1205 1206 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType() 1207 M*/ 1208 1209 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part) 1210 { 1211 PetscPartitioner_Gather *p; 1212 PetscErrorCode ierr; 1213 1214 PetscFunctionBegin; 1215 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 1216 ierr = PetscNewLog(part, &p);CHKERRQ(ierr); 1217 part->data = p; 1218 1219 ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr); 1220 PetscFunctionReturn(0); 1221 } 1222 1223 1224 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part) 1225 { 1226 PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data; 1227 PetscErrorCode ierr; 1228 1229 PetscFunctionBegin; 1230 ierr = PetscFree(p);CHKERRQ(ierr); 1231 PetscFunctionReturn(0); 1232 } 1233 1234 static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer) 1235 { 1236 PetscFunctionBegin; 1237 PetscFunctionReturn(0); 1238 } 1239 1240 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer) 1241 { 1242 PetscBool iascii; 1243 PetscErrorCode ierr; 1244 1245 PetscFunctionBegin; 1246 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 1247 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2); 1248 ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr); 1249 if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);} 1250 PetscFunctionReturn(0); 1251 } 1252 1253 #if defined(PETSC_HAVE_CHACO) 1254 #if defined(PETSC_HAVE_UNISTD_H) 1255 #include <unistd.h> 1256 #endif 1257 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT) 1258 #include <chaco.h> 1259 #else 1260 /* Older versions of Chaco do not have an include file */ 1261 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts, 1262 float *ewgts, float *x, float *y, float *z, char *outassignname, 1263 char *outfilename, short *assignment, int architecture, int ndims_tot, 1264 int mesh_dims[3], double *goal, int global_method, int local_method, 1265 int rqi_flag, int vmax, int ndims, double eigtol, long seed); 1266 #endif 1267 extern int FREE_GRAPH; 1268 #endif 1269 1270 static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition) 1271 { 1272 #if defined(PETSC_HAVE_CHACO) 1273 enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3}; 1274 MPI_Comm comm; 1275 int nvtxs = numVertices; /* number of vertices in full graph */ 1276 int *vwgts = NULL; /* weights for all vertices */ 1277 float *ewgts = NULL; /* weights for all edges */ 1278 float *x = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */ 1279 char *outassignname = NULL; /* name of assignment output file */ 1280 char *outfilename = NULL; /* output file name */ 1281 int architecture = 1; /* 0 => hypercube, d => d-dimensional mesh */ 1282 int ndims_tot = 0; /* total number of cube dimensions to divide */ 1283 int mesh_dims[3]; /* dimensions of mesh of processors */ 1284 double *goal = NULL; /* desired set sizes for each set */ 1285 int global_method = 1; /* global partitioning algorithm */ 1286 int local_method = 1; /* local partitioning algorithm */ 1287 int rqi_flag = 0; /* should I use RQI/Symmlq eigensolver? */ 1288 int vmax = 200; /* how many vertices to coarsen down to? */ 1289 int ndims = 1; /* number of eigenvectors (2^d sets) */ 1290 double eigtol = 0.001; /* tolerance on eigenvectors */ 1291 long seed = 123636512; /* for random graph mutations */ 1292 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT) 1293 int *assignment; /* Output partition */ 1294 #else 1295 short int *assignment; /* Output partition */ 1296 #endif 1297 int fd_stdout, fd_pipe[2]; 1298 PetscInt *points; 1299 int i, v, p; 1300 PetscErrorCode ierr; 1301 1302 PetscFunctionBegin; 1303 ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr); 1304 #if defined (PETSC_USE_DEBUG) 1305 { 1306 int ival,isum; 1307 PetscBool distributed; 1308 1309 ival = (numVertices > 0); 1310 ierr = MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);CHKERRQ(ierr); 1311 distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE; 1312 if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph"); 1313 } 1314 #endif 1315 if (!numVertices) { 1316 ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr); 1317 ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr); 1318 ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr); 1319 PetscFunctionReturn(0); 1320 } 1321 FREE_GRAPH = 0; /* Do not let Chaco free my memory */ 1322 for (i = 0; i < start[numVertices]; ++i) ++adjacency[i]; 1323 1324 if (global_method == INERTIAL_METHOD) { 1325 /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */ 1326 SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported"); 1327 } 1328 mesh_dims[0] = nparts; 1329 mesh_dims[1] = 1; 1330 mesh_dims[2] = 1; 1331 ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr); 1332 /* Chaco outputs to stdout. We redirect this to a buffer. */ 1333 /* TODO: check error codes for UNIX calls */ 1334 #if defined(PETSC_HAVE_UNISTD_H) 1335 { 1336 int piperet; 1337 piperet = pipe(fd_pipe); 1338 if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe"); 1339 fd_stdout = dup(1); 1340 close(1); 1341 dup2(fd_pipe[1], 1); 1342 } 1343 #endif 1344 ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename, 1345 assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag, 1346 vmax, ndims, eigtol, seed); 1347 #if defined(PETSC_HAVE_UNISTD_H) 1348 { 1349 char msgLog[10000]; 1350 int count; 1351 1352 fflush(stdout); 1353 count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char)); 1354 if (count < 0) count = 0; 1355 msgLog[count] = 0; 1356 close(1); 1357 dup2(fd_stdout, 1); 1358 close(fd_stdout); 1359 close(fd_pipe[0]); 1360 close(fd_pipe[1]); 1361 if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog); 1362 } 1363 #else 1364 if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout"); 1365 #endif 1366 /* Convert to PetscSection+IS */ 1367 ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr); 1368 for (v = 0; v < nvtxs; ++v) { 1369 ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr); 1370 } 1371 ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr); 1372 ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr); 1373 for (p = 0, i = 0; p < nparts; ++p) { 1374 for (v = 0; v < nvtxs; ++v) { 1375 if (assignment[v] == p) points[i++] = v; 1376 } 1377 } 1378 if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs); 1379 ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr); 1380 if (global_method == INERTIAL_METHOD) { 1381 /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */ 1382 } 1383 ierr = PetscFree(assignment);CHKERRQ(ierr); 1384 for (i = 0; i < start[numVertices]; ++i) --adjacency[i]; 1385 PetscFunctionReturn(0); 1386 #else 1387 SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco."); 1388 #endif 1389 } 1390 1391 static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part) 1392 { 1393 PetscFunctionBegin; 1394 part->ops->view = PetscPartitionerView_Chaco; 1395 part->ops->destroy = PetscPartitionerDestroy_Chaco; 1396 part->ops->partition = PetscPartitionerPartition_Chaco; 1397 PetscFunctionReturn(0); 1398 } 1399 1400 /*MC 1401 PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library 1402 1403 Level: intermediate 1404 1405 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType() 1406 M*/ 1407 1408 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part) 1409 { 1410 PetscPartitioner_Chaco *p; 1411 PetscErrorCode ierr; 1412 1413 PetscFunctionBegin; 1414 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 1415 ierr = PetscNewLog(part, &p);CHKERRQ(ierr); 1416 part->data = p; 1417 1418 ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr); 1419 ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr); 1420 PetscFunctionReturn(0); 1421 } 1422 1423 static const char *ptypes[] = {"kway", "rb"}; 1424 1425 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part) 1426 { 1427 PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data; 1428 PetscErrorCode ierr; 1429 1430 PetscFunctionBegin; 1431 ierr = PetscFree(p);CHKERRQ(ierr); 1432 PetscFunctionReturn(0); 1433 } 1434 1435 static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer) 1436 { 1437 PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data; 1438 PetscErrorCode ierr; 1439 1440 PetscFunctionBegin; 1441 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 1442 ierr = PetscViewerASCIIPrintf(viewer, "ParMetis type: %s\n", ptypes[p->ptype]);CHKERRQ(ierr); 1443 ierr = PetscViewerASCIIPrintf(viewer, "load imbalance ratio %g\n", (double) p->imbalanceRatio);CHKERRQ(ierr); 1444 ierr = PetscViewerASCIIPrintf(viewer, "debug flag %D\n", p->debugFlag);CHKERRQ(ierr); 1445 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 1446 PetscFunctionReturn(0); 1447 } 1448 1449 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer) 1450 { 1451 PetscBool iascii; 1452 PetscErrorCode ierr; 1453 1454 PetscFunctionBegin; 1455 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 1456 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2); 1457 ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr); 1458 if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);} 1459 PetscFunctionReturn(0); 1460 } 1461 1462 static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part) 1463 { 1464 PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data; 1465 PetscErrorCode ierr; 1466 1467 PetscFunctionBegin; 1468 ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");CHKERRQ(ierr); 1469 ierr = PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);CHKERRQ(ierr); 1470 ierr = PetscOptionsReal("-petscpartitioner_parmetis_imbalance_ratio", "Load imbalance ratio limit", "", p->imbalanceRatio, &p->imbalanceRatio, NULL);CHKERRQ(ierr); 1471 ierr = PetscOptionsInt("-petscpartitioner_parmetis_debug", "Debugging flag", "", p->debugFlag, &p->debugFlag, NULL);CHKERRQ(ierr); 1472 ierr = PetscOptionsTail();CHKERRQ(ierr); 1473 PetscFunctionReturn(0); 1474 } 1475 1476 #if defined(PETSC_HAVE_PARMETIS) 1477 #include <parmetis.h> 1478 #endif 1479 1480 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition) 1481 { 1482 #if defined(PETSC_HAVE_PARMETIS) 1483 PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *) part->data; 1484 MPI_Comm comm; 1485 PetscSection section; 1486 PetscInt nvtxs = numVertices; /* The number of vertices in full graph */ 1487 PetscInt *vtxdist; /* Distribution of vertices across processes */ 1488 PetscInt *xadj = start; /* Start of edge list for each vertex */ 1489 PetscInt *adjncy = adjacency; /* Edge lists for all vertices */ 1490 PetscInt *vwgt = NULL; /* Vertex weights */ 1491 PetscInt *adjwgt = NULL; /* Edge weights */ 1492 PetscInt wgtflag = 0; /* Indicates which weights are present */ 1493 PetscInt numflag = 0; /* Indicates initial offset (0 or 1) */ 1494 PetscInt ncon = 1; /* The number of weights per vertex */ 1495 PetscInt metis_ptype = pm->ptype; /* kway or recursive bisection */ 1496 real_t *tpwgts; /* The fraction of vertex weights assigned to each partition */ 1497 real_t *ubvec; /* The balance intolerance for vertex weights */ 1498 PetscInt options[64]; /* Options */ 1499 /* Outputs */ 1500 PetscInt v, i, *assignment, *points; 1501 PetscMPIInt size, rank, p; 1502 PetscErrorCode ierr; 1503 1504 PetscFunctionBegin; 1505 ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr); 1506 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 1507 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 1508 /* Calculate vertex distribution */ 1509 ierr = PetscMalloc5(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr); 1510 vtxdist[0] = 0; 1511 ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr); 1512 for (p = 2; p <= size; ++p) { 1513 vtxdist[p] += vtxdist[p-1]; 1514 } 1515 /* Calculate partition weights */ 1516 for (p = 0; p < nparts; ++p) { 1517 tpwgts[p] = 1.0/nparts; 1518 } 1519 ubvec[0] = pm->imbalanceRatio; 1520 /* Weight cells by dofs on cell by default */ 1521 ierr = DMGetSection(dm, §ion);CHKERRQ(ierr); 1522 if (section) { 1523 PetscInt cStart, cEnd, dof; 1524 1525 ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr); 1526 for (v = cStart; v < cEnd; ++v) { 1527 ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr); 1528 /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */ 1529 /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */ 1530 if (v-cStart < numVertices) vwgt[v-cStart] = PetscMax(dof, 1); 1531 } 1532 } else { 1533 for (v = 0; v < nvtxs; ++v) vwgt[v] = 1; 1534 } 1535 wgtflag |= 2; /* have weights on graph vertices */ 1536 1537 if (nparts == 1) { 1538 ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr); 1539 } else { 1540 for (p = 0; !vtxdist[p+1] && p < size; ++p); 1541 if (vtxdist[p+1] == vtxdist[size]) { 1542 if (rank == p) { 1543 ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */ 1544 if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()"); 1545 if (metis_ptype == 1) { 1546 PetscStackPush("METIS_PartGraphRecursive"); 1547 ierr = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment); 1548 PetscStackPop; 1549 if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()"); 1550 } else { 1551 /* 1552 It would be nice to activate the two options below, but they would need some actual testing. 1553 - Turning on these options may exercise path of the METIS code that have bugs and may break production runs. 1554 - If CONTIG is set to 1, METIS will exit with error if the graph is disconnected, despite the manual saying the option is ignored in such case. 1555 */ 1556 /* options[METIS_OPTION_CONTIG] = 1; */ /* try to produce partitions that are contiguous */ 1557 /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */ 1558 PetscStackPush("METIS_PartGraphKway"); 1559 ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment); 1560 PetscStackPop; 1561 if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()"); 1562 } 1563 } 1564 } else { 1565 options[0] = 1; 1566 options[1] = pm->debugFlag; 1567 PetscStackPush("ParMETIS_V3_PartKway"); 1568 ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &comm); 1569 PetscStackPop; 1570 if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr); 1571 } 1572 } 1573 /* Convert to PetscSection+IS */ 1574 ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr); 1575 for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);} 1576 ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr); 1577 ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr); 1578 for (p = 0, i = 0; p < nparts; ++p) { 1579 for (v = 0; v < nvtxs; ++v) { 1580 if (assignment[v] == p) points[i++] = v; 1581 } 1582 } 1583 if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs); 1584 ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr); 1585 ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr); 1586 PetscFunctionReturn(0); 1587 #else 1588 SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis."); 1589 #endif 1590 } 1591 1592 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part) 1593 { 1594 PetscFunctionBegin; 1595 part->ops->view = PetscPartitionerView_ParMetis; 1596 part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis; 1597 part->ops->destroy = PetscPartitionerDestroy_ParMetis; 1598 part->ops->partition = PetscPartitionerPartition_ParMetis; 1599 PetscFunctionReturn(0); 1600 } 1601 1602 /*MC 1603 PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library 1604 1605 Level: intermediate 1606 1607 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType() 1608 M*/ 1609 1610 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part) 1611 { 1612 PetscPartitioner_ParMetis *p; 1613 PetscErrorCode ierr; 1614 1615 PetscFunctionBegin; 1616 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 1617 ierr = PetscNewLog(part, &p);CHKERRQ(ierr); 1618 part->data = p; 1619 1620 p->ptype = 0; 1621 p->imbalanceRatio = 1.05; 1622 p->debugFlag = 0; 1623 1624 ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr); 1625 ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr); 1626 PetscFunctionReturn(0); 1627 } 1628 1629 1630 PetscBool PTScotchPartitionercite = PETSC_FALSE; 1631 const char PTScotchPartitionerCitation[] = 1632 "@article{PTSCOTCH,\n" 1633 " author = {C. Chevalier and F. Pellegrini},\n" 1634 " title = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n" 1635 " journal = {Parallel Computing},\n" 1636 " volume = {34},\n" 1637 " number = {6},\n" 1638 " pages = {318--331},\n" 1639 " year = {2008},\n" 1640 " doi = {https://doi.org/10.1016/j.parco.2007.12.001}\n" 1641 "}\n"; 1642 1643 typedef struct { 1644 PetscInt strategy; 1645 PetscReal imbalance; 1646 } PetscPartitioner_PTScotch; 1647 1648 static const char *const 1649 PTScotchStrategyList[] = { 1650 "DEFAULT", 1651 "QUALITY", 1652 "SPEED", 1653 "BALANCE", 1654 "SAFETY", 1655 "SCALABILITY", 1656 "RECURSIVE", 1657 "REMAP" 1658 }; 1659 1660 #if defined(PETSC_HAVE_PTSCOTCH) 1661 1662 EXTERN_C_BEGIN 1663 #include <ptscotch.h> 1664 EXTERN_C_END 1665 1666 #define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0) 1667 1668 static int PTScotch_Strategy(PetscInt strategy) 1669 { 1670 switch (strategy) { 1671 case 0: return SCOTCH_STRATDEFAULT; 1672 case 1: return SCOTCH_STRATQUALITY; 1673 case 2: return SCOTCH_STRATSPEED; 1674 case 3: return SCOTCH_STRATBALANCE; 1675 case 4: return SCOTCH_STRATSAFETY; 1676 case 5: return SCOTCH_STRATSCALABILITY; 1677 case 6: return SCOTCH_STRATRECURSIVE; 1678 case 7: return SCOTCH_STRATREMAP; 1679 default: return SCOTCH_STRATDEFAULT; 1680 } 1681 } 1682 1683 1684 static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[], 1685 SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[]) 1686 { 1687 SCOTCH_Graph grafdat; 1688 SCOTCH_Strat stradat; 1689 SCOTCH_Num vertnbr = n; 1690 SCOTCH_Num edgenbr = xadj[n]; 1691 SCOTCH_Num* velotab = vtxwgt; 1692 SCOTCH_Num* edlotab = adjwgt; 1693 SCOTCH_Num flagval = strategy; 1694 double kbalval = imbalance; 1695 PetscErrorCode ierr; 1696 1697 PetscFunctionBegin; 1698 { 1699 PetscBool flg = PETSC_TRUE; 1700 ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr); 1701 if (!flg) velotab = NULL; 1702 } 1703 ierr = SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr); 1704 ierr = SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr); 1705 ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr); 1706 ierr = SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr); 1707 #if defined(PETSC_USE_DEBUG) 1708 ierr = SCOTCH_graphCheck(&grafdat);CHKERRPTSCOTCH(ierr); 1709 #endif 1710 ierr = SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr); 1711 SCOTCH_stratExit(&stradat); 1712 SCOTCH_graphExit(&grafdat); 1713 PetscFunctionReturn(0); 1714 } 1715 1716 static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[], 1717 SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[], MPI_Comm comm) 1718 { 1719 PetscMPIInt procglbnbr; 1720 PetscMPIInt proclocnum; 1721 SCOTCH_Arch archdat; 1722 SCOTCH_Dgraph grafdat; 1723 SCOTCH_Dmapping mappdat; 1724 SCOTCH_Strat stradat; 1725 SCOTCH_Num vertlocnbr; 1726 SCOTCH_Num edgelocnbr; 1727 SCOTCH_Num* veloloctab = vtxwgt; 1728 SCOTCH_Num* edloloctab = adjwgt; 1729 SCOTCH_Num flagval = strategy; 1730 double kbalval = imbalance; 1731 PetscErrorCode ierr; 1732 1733 PetscFunctionBegin; 1734 { 1735 PetscBool flg = PETSC_TRUE; 1736 ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr); 1737 if (!flg) veloloctab = NULL; 1738 } 1739 ierr = MPI_Comm_size(comm, &procglbnbr);CHKERRQ(ierr); 1740 ierr = MPI_Comm_rank(comm, &proclocnum);CHKERRQ(ierr); 1741 vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum]; 1742 edgelocnbr = xadj[vertlocnbr]; 1743 1744 ierr = SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr); 1745 ierr = SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr); 1746 /*#if defined(PETSC_USE_DEBUG)*/ 1747 ierr = SCOTCH_dgraphCheck(&grafdat);CHKERRPTSCOTCH(ierr); 1748 /*#endif*/ 1749 ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr); 1750 ierr = SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRQ(ierr); 1751 ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr); 1752 ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr); 1753 ierr = SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr); 1754 1755 /*PetscSynchronizedPrintf(comm,"[%d] part: ", proclocnum);*/ 1756 /*for(int i=0;i<vertlocnbr;i++) {*/ 1757 /* PetscSynchronizedPrintf(comm, "%i ", part[i]);*/ 1758 /*}*/ 1759 /*PetscSynchronizedPrintf(comm, "\n");*/ 1760 /*PetscSynchronizedFlush(comm, PETSC_STDOUT);*/ 1761 /*PetscSynchronizedPrintf(comm,"[%d] veloloctab: ", proclocnum);*/ 1762 /*for(int i=0;i<vertlocnbr;i++) {*/ 1763 /* PetscSynchronizedPrintf(comm, "%i ", veloloctab[i]);*/ 1764 /*}*/ 1765 /*PetscSynchronizedPrintf(comm, "\n");*/ 1766 /*PetscSynchronizedFlush(comm, PETSC_STDOUT);*/ 1767 ierr = SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr); 1768 SCOTCH_dgraphMapExit(&grafdat, &mappdat); 1769 SCOTCH_archExit(&archdat); 1770 SCOTCH_stratExit(&stradat); 1771 SCOTCH_dgraphExit(&grafdat); 1772 PetscFunctionReturn(0); 1773 } 1774 1775 #endif /* PETSC_HAVE_PTSCOTCH */ 1776 1777 static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part) 1778 { 1779 PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data; 1780 PetscErrorCode ierr; 1781 1782 PetscFunctionBegin; 1783 ierr = PetscFree(p);CHKERRQ(ierr); 1784 PetscFunctionReturn(0); 1785 } 1786 1787 static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer) 1788 { 1789 PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data; 1790 PetscErrorCode ierr; 1791 1792 PetscFunctionBegin; 1793 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 1794 ierr = PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);CHKERRQ(ierr); 1795 ierr = PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n",(double)p->imbalance);CHKERRQ(ierr); 1796 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 1797 PetscFunctionReturn(0); 1798 } 1799 1800 static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer) 1801 { 1802 PetscBool iascii; 1803 PetscErrorCode ierr; 1804 1805 PetscFunctionBegin; 1806 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 1807 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2); 1808 ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr); 1809 if (iascii) {ierr = PetscPartitionerView_PTScotch_Ascii(part, viewer);CHKERRQ(ierr);} 1810 PetscFunctionReturn(0); 1811 } 1812 1813 static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part) 1814 { 1815 PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data; 1816 const char *const *slist = PTScotchStrategyList; 1817 PetscInt nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0])); 1818 PetscBool flag; 1819 PetscErrorCode ierr; 1820 1821 PetscFunctionBegin; 1822 ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");CHKERRQ(ierr); 1823 ierr = PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);CHKERRQ(ierr); 1824 ierr = PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);CHKERRQ(ierr); 1825 ierr = PetscOptionsTail();CHKERRQ(ierr); 1826 PetscFunctionReturn(0); 1827 } 1828 1829 static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition) 1830 { 1831 #if defined(PETSC_HAVE_PTSCOTCH) 1832 MPI_Comm comm = PetscObjectComm((PetscObject)part); 1833 PetscInt nvtxs = numVertices; /* The number of vertices in full graph */ 1834 PetscInt *vtxdist; /* Distribution of vertices across processes */ 1835 PetscInt *xadj = start; /* Start of edge list for each vertex */ 1836 PetscInt *adjncy = adjacency; /* Edge lists for all vertices */ 1837 PetscInt *vwgt = NULL; /* Vertex weights */ 1838 PetscInt *adjwgt = NULL; /* Edge weights */ 1839 PetscInt v, i, *assignment, *points; 1840 PetscMPIInt size, rank, p; 1841 PetscErrorCode ierr; 1842 1843 PetscFunctionBegin; 1844 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 1845 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 1846 ierr = PetscMalloc2(nparts+1,&vtxdist,PetscMax(nvtxs,1),&assignment);CHKERRQ(ierr); 1847 1848 /* Calculate vertex distribution */ 1849 vtxdist[0] = 0; 1850 ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr); 1851 for (p = 2; p <= size; ++p) { 1852 vtxdist[p] += vtxdist[p-1]; 1853 } 1854 1855 if (nparts == 1) { 1856 ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr); 1857 } else { 1858 PetscSection section; 1859 /* Weight cells by dofs on cell by default */ 1860 ierr = PetscMalloc1(PetscMax(nvtxs,1),&vwgt);CHKERRQ(ierr); 1861 ierr = DMGetSection(dm, §ion);CHKERRQ(ierr); 1862 if (section) { 1863 PetscInt vStart, vEnd, dof; 1864 ierr = DMPlexGetHeightStratum(dm, 0, &vStart, &vEnd);CHKERRQ(ierr); 1865 for (v = vStart; v < vEnd; ++v) { 1866 ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr); 1867 /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */ 1868 /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */ 1869 if (v-vStart < numVertices) vwgt[v-vStart] = PetscMax(dof, 1); 1870 } 1871 } else { 1872 for (v = 0; v < nvtxs; ++v) vwgt[v] = 1; 1873 } 1874 { 1875 PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data; 1876 int strat = PTScotch_Strategy(pts->strategy); 1877 double imbal = (double)pts->imbalance; 1878 1879 for (p = 0; !vtxdist[p+1] && p < size; ++p); 1880 if (vtxdist[p+1] == vtxdist[size]) { 1881 if (rank == p) { 1882 ierr = PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, assignment);CHKERRQ(ierr); 1883 } 1884 } else { 1885 ierr = PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, assignment, comm);CHKERRQ(ierr); 1886 } 1887 } 1888 ierr = PetscFree(vwgt);CHKERRQ(ierr); 1889 } 1890 1891 /* Convert to PetscSection+IS */ 1892 ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr); 1893 for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);} 1894 ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr); 1895 ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr); 1896 for (p = 0, i = 0; p < nparts; ++p) { 1897 for (v = 0; v < nvtxs; ++v) { 1898 if (assignment[v] == p) points[i++] = v; 1899 } 1900 } 1901 if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs); 1902 ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr); 1903 1904 ierr = PetscFree2(vtxdist,assignment);CHKERRQ(ierr); 1905 PetscFunctionReturn(0); 1906 #else 1907 SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch."); 1908 #endif 1909 } 1910 1911 static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part) 1912 { 1913 PetscFunctionBegin; 1914 part->ops->view = PetscPartitionerView_PTScotch; 1915 part->ops->destroy = PetscPartitionerDestroy_PTScotch; 1916 part->ops->partition = PetscPartitionerPartition_PTScotch; 1917 part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch; 1918 PetscFunctionReturn(0); 1919 } 1920 1921 /*MC 1922 PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library 1923 1924 Level: intermediate 1925 1926 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType() 1927 M*/ 1928 1929 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part) 1930 { 1931 PetscPartitioner_PTScotch *p; 1932 PetscErrorCode ierr; 1933 1934 PetscFunctionBegin; 1935 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 1936 ierr = PetscNewLog(part, &p);CHKERRQ(ierr); 1937 part->data = p; 1938 1939 p->strategy = 0; 1940 p->imbalance = 0.01; 1941 1942 ierr = PetscPartitionerInitialize_PTScotch(part);CHKERRQ(ierr); 1943 ierr = PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);CHKERRQ(ierr); 1944 PetscFunctionReturn(0); 1945 } 1946 1947 1948 /*@ 1949 DMPlexGetPartitioner - Get the mesh partitioner 1950 1951 Not collective 1952 1953 Input Parameter: 1954 . dm - The DM 1955 1956 Output Parameter: 1957 . part - The PetscPartitioner 1958 1959 Level: developer 1960 1961 Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner. 1962 1963 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate() 1964 @*/ 1965 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part) 1966 { 1967 DM_Plex *mesh = (DM_Plex *) dm->data; 1968 1969 PetscFunctionBegin; 1970 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1971 PetscValidPointer(part, 2); 1972 *part = mesh->partitioner; 1973 PetscFunctionReturn(0); 1974 } 1975 1976 /*@ 1977 DMPlexSetPartitioner - Set the mesh partitioner 1978 1979 logically collective on dm and part 1980 1981 Input Parameters: 1982 + dm - The DM 1983 - part - The partitioner 1984 1985 Level: developer 1986 1987 Note: Any existing PetscPartitioner will be destroyed. 1988 1989 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate() 1990 @*/ 1991 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part) 1992 { 1993 DM_Plex *mesh = (DM_Plex *) dm->data; 1994 PetscErrorCode ierr; 1995 1996 PetscFunctionBegin; 1997 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1998 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2); 1999 ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr); 2000 ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr); 2001 mesh->partitioner = part; 2002 PetscFunctionReturn(0); 2003 } 2004 2005 static PetscErrorCode DMPlexAddClosure_Tree(DM dm, PetscHSetI ht, PetscInt point, PetscBool up, PetscBool down) 2006 { 2007 PetscErrorCode ierr; 2008 2009 PetscFunctionBegin; 2010 if (up) { 2011 PetscInt parent; 2012 2013 ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr); 2014 if (parent != point) { 2015 PetscInt closureSize, *closure = NULL, i; 2016 2017 ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr); 2018 for (i = 0; i < closureSize; i++) { 2019 PetscInt cpoint = closure[2*i]; 2020 2021 ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr); 2022 ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr); 2023 } 2024 ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr); 2025 } 2026 } 2027 if (down) { 2028 PetscInt numChildren; 2029 const PetscInt *children; 2030 2031 ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr); 2032 if (numChildren) { 2033 PetscInt i; 2034 2035 for (i = 0; i < numChildren; i++) { 2036 PetscInt cpoint = children[i]; 2037 2038 ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr); 2039 ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr); 2040 } 2041 } 2042 } 2043 PetscFunctionReturn(0); 2044 } 2045 2046 /*@ 2047 DMPlexPartitionLabelClosure - Add the closure of all points to the partition label 2048 2049 Input Parameters: 2050 + dm - The DM 2051 - label - DMLabel assinging ranks to remote roots 2052 2053 Level: developer 2054 2055 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap 2056 @*/ 2057 PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label) 2058 { 2059 IS rankIS, pointIS; 2060 const PetscInt *ranks, *points; 2061 PetscInt numRanks, numPoints, r, p, c, closureSize; 2062 PetscInt *closure = NULL; 2063 DM_Plex *mesh = (DM_Plex *)dm->data; 2064 PetscBool hasTree = (mesh->parentSection || mesh->childSection) ? PETSC_TRUE : PETSC_FALSE; 2065 PetscErrorCode ierr; 2066 2067 PetscFunctionBegin; 2068 ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr); 2069 ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr); 2070 ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr); 2071 2072 for (r = 0; r < numRanks; ++r) { 2073 const PetscInt rank = ranks[r]; 2074 PetscHSetI ht; 2075 PetscInt nelems, *elems, off = 0; 2076 2077 ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr); 2078 ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr); 2079 ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr); 2080 ierr = PetscHSetICreate(&ht);CHKERRQ(ierr); 2081 ierr = PetscHSetIResize(ht, numPoints*16);CHKERRQ(ierr); 2082 for (p = 0; p < numPoints; ++p) { 2083 ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr); 2084 for (c = 0; c < closureSize*2; c += 2) { 2085 ierr = PetscHSetIAdd(ht, closure[c]);CHKERRQ(ierr); 2086 if (hasTree) {ierr = DMPlexAddClosure_Tree(dm, ht, closure[c], PETSC_TRUE, PETSC_TRUE);CHKERRQ(ierr);} 2087 } 2088 } 2089 ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr); 2090 ierr = ISDestroy(&pointIS);CHKERRQ(ierr); 2091 ierr = PetscHSetIGetSize(ht, &nelems);CHKERRQ(ierr); 2092 ierr = PetscMalloc1(nelems, &elems);CHKERRQ(ierr); 2093 ierr = PetscHSetIGetElems(ht, &off, elems);CHKERRQ(ierr); 2094 ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr); 2095 ierr = PetscSortInt(nelems, elems);CHKERRQ(ierr); 2096 ierr = ISCreateGeneral(PETSC_COMM_SELF, nelems, elems, PETSC_OWN_POINTER, &pointIS);CHKERRQ(ierr); 2097 ierr = DMLabelSetStratumIS(label, rank, pointIS);CHKERRQ(ierr); 2098 ierr = ISDestroy(&pointIS);CHKERRQ(ierr); 2099 } 2100 2101 if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure);CHKERRQ(ierr);} 2102 ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr); 2103 ierr = ISDestroy(&rankIS);CHKERRQ(ierr); 2104 PetscFunctionReturn(0); 2105 } 2106 2107 /*@ 2108 DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label 2109 2110 Input Parameters: 2111 + dm - The DM 2112 - label - DMLabel assinging ranks to remote roots 2113 2114 Level: developer 2115 2116 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap 2117 @*/ 2118 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label) 2119 { 2120 IS rankIS, pointIS; 2121 const PetscInt *ranks, *points; 2122 PetscInt numRanks, numPoints, r, p, a, adjSize; 2123 PetscInt *adj = NULL; 2124 PetscErrorCode ierr; 2125 2126 PetscFunctionBegin; 2127 ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr); 2128 ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr); 2129 ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr); 2130 for (r = 0; r < numRanks; ++r) { 2131 const PetscInt rank = ranks[r]; 2132 2133 ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr); 2134 ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr); 2135 ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr); 2136 for (p = 0; p < numPoints; ++p) { 2137 adjSize = PETSC_DETERMINE; 2138 ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr); 2139 for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);} 2140 } 2141 ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr); 2142 ierr = ISDestroy(&pointIS);CHKERRQ(ierr); 2143 } 2144 ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr); 2145 ierr = ISDestroy(&rankIS);CHKERRQ(ierr); 2146 ierr = PetscFree(adj);CHKERRQ(ierr); 2147 PetscFunctionReturn(0); 2148 } 2149 2150 /*@ 2151 DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF 2152 2153 Input Parameters: 2154 + dm - The DM 2155 - label - DMLabel assinging ranks to remote roots 2156 2157 Level: developer 2158 2159 Note: This is required when generating multi-level overlaps to capture 2160 overlap points from non-neighbouring partitions. 2161 2162 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap 2163 @*/ 2164 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label) 2165 { 2166 MPI_Comm comm; 2167 PetscMPIInt rank; 2168 PetscSF sfPoint; 2169 DMLabel lblRoots, lblLeaves; 2170 IS rankIS, pointIS; 2171 const PetscInt *ranks; 2172 PetscInt numRanks, r; 2173 PetscErrorCode ierr; 2174 2175 PetscFunctionBegin; 2176 ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr); 2177 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 2178 ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr); 2179 /* Pull point contributions from remote leaves into local roots */ 2180 ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr); 2181 ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr); 2182 ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr); 2183 ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr); 2184 for (r = 0; r < numRanks; ++r) { 2185 const PetscInt remoteRank = ranks[r]; 2186 if (remoteRank == rank) continue; 2187 ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr); 2188 ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr); 2189 ierr = ISDestroy(&pointIS);CHKERRQ(ierr); 2190 } 2191 ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr); 2192 ierr = ISDestroy(&rankIS);CHKERRQ(ierr); 2193 ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr); 2194 /* Push point contributions from roots into remote leaves */ 2195 ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr); 2196 ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr); 2197 ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr); 2198 ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr); 2199 for (r = 0; r < numRanks; ++r) { 2200 const PetscInt remoteRank = ranks[r]; 2201 if (remoteRank == rank) continue; 2202 ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr); 2203 ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr); 2204 ierr = ISDestroy(&pointIS);CHKERRQ(ierr); 2205 } 2206 ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr); 2207 ierr = ISDestroy(&rankIS);CHKERRQ(ierr); 2208 ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr); 2209 PetscFunctionReturn(0); 2210 } 2211 2212 /*@ 2213 DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label 2214 2215 Input Parameters: 2216 + dm - The DM 2217 . rootLabel - DMLabel assinging ranks to local roots 2218 . processSF - A star forest mapping into the local index on each remote rank 2219 2220 Output Parameter: 2221 - leafLabel - DMLabel assinging ranks to remote roots 2222 2223 Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The 2224 resulting leafLabel is a receiver mapping of remote roots to their parent rank. 2225 2226 Level: developer 2227 2228 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap 2229 @*/ 2230 PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel) 2231 { 2232 MPI_Comm comm; 2233 PetscMPIInt rank, size; 2234 PetscInt p, n, numNeighbors, ssize, l, nleaves; 2235 PetscSF sfPoint; 2236 PetscSFNode *rootPoints, *leafPoints; 2237 PetscSection rootSection, leafSection; 2238 const PetscSFNode *remote; 2239 const PetscInt *local, *neighbors; 2240 IS valueIS; 2241 PetscErrorCode ierr; 2242 2243 PetscFunctionBegin; 2244 ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr); 2245 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 2246 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 2247 ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr); 2248 2249 /* Convert to (point, rank) and use actual owners */ 2250 ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr); 2251 ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr); 2252 ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr); 2253 ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr); 2254 ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr); 2255 for (n = 0; n < numNeighbors; ++n) { 2256 PetscInt numPoints; 2257 2258 ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr); 2259 ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr); 2260 } 2261 ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr); 2262 ierr = PetscSectionGetStorageSize(rootSection, &ssize);CHKERRQ(ierr); 2263 ierr = PetscMalloc1(ssize, &rootPoints);CHKERRQ(ierr); 2264 ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr); 2265 for (n = 0; n < numNeighbors; ++n) { 2266 IS pointIS; 2267 const PetscInt *points; 2268 PetscInt off, numPoints, p; 2269 2270 ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr); 2271 ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr); 2272 ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr); 2273 ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr); 2274 for (p = 0; p < numPoints; ++p) { 2275 if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);} 2276 else {l = -1;} 2277 if (l >= 0) {rootPoints[off+p] = remote[l];} 2278 else {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;} 2279 } 2280 ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr); 2281 ierr = ISDestroy(&pointIS);CHKERRQ(ierr); 2282 } 2283 ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr); 2284 ierr = ISDestroy(&valueIS);CHKERRQ(ierr); 2285 /* Communicate overlap */ 2286 ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr); 2287 ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr); 2288 /* Filter remote contributions (ovLeafPoints) into the overlapSF */ 2289 ierr = PetscSectionGetStorageSize(leafSection, &ssize);CHKERRQ(ierr); 2290 for (p = 0; p < ssize; p++) { 2291 ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr); 2292 } 2293 ierr = PetscFree(rootPoints);CHKERRQ(ierr); 2294 ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr); 2295 ierr = PetscFree(leafPoints);CHKERRQ(ierr); 2296 ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr); 2297 PetscFunctionReturn(0); 2298 } 2299 2300 /*@ 2301 DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points 2302 2303 Input Parameters: 2304 + dm - The DM 2305 . label - DMLabel assinging ranks to remote roots 2306 2307 Output Parameter: 2308 - sf - The star forest communication context encapsulating the defined mapping 2309 2310 Note: The incoming label is a receiver mapping of remote points to their parent rank. 2311 2312 Level: developer 2313 2314 .seealso: DMPlexDistribute(), DMPlexCreateOverlap 2315 @*/ 2316 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf) 2317 { 2318 PetscMPIInt rank, size; 2319 PetscInt n, numRemote, p, numPoints, pStart, pEnd, idx = 0; 2320 PetscSFNode *remotePoints; 2321 IS remoteRootIS; 2322 const PetscInt *remoteRoots; 2323 PetscErrorCode ierr; 2324 2325 PetscFunctionBegin; 2326 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr); 2327 ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr); 2328 2329 for (numRemote = 0, n = 0; n < size; ++n) { 2330 ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr); 2331 numRemote += numPoints; 2332 } 2333 ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr); 2334 /* Put owned points first */ 2335 ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr); 2336 if (numPoints > 0) { 2337 ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr); 2338 ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr); 2339 for (p = 0; p < numPoints; p++) { 2340 remotePoints[idx].index = remoteRoots[p]; 2341 remotePoints[idx].rank = rank; 2342 idx++; 2343 } 2344 ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr); 2345 ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr); 2346 } 2347 /* Now add remote points */ 2348 for (n = 0; n < size; ++n) { 2349 ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr); 2350 if (numPoints <= 0 || n == rank) continue; 2351 ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr); 2352 ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr); 2353 for (p = 0; p < numPoints; p++) { 2354 remotePoints[idx].index = remoteRoots[p]; 2355 remotePoints[idx].rank = n; 2356 idx++; 2357 } 2358 ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr); 2359 ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr); 2360 } 2361 ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr); 2362 ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr); 2363 ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr); 2364 PetscFunctionReturn(0); 2365 } 2366 2367 /*@ 2368 DMPlexRebalanceSharedPoints - Redistribute points in the plex that are shared in order to achieve better balancing. 2369 2370 Input parameters: 2371 + dm - The DMPlex object 2372 2373 Level: user 2374 2375 @*/ 2376 2377 PetscErrorCode DMPlexRebalanceSharedPoints(DM dm, PetscBool useInitialGuess, PetscBool parallel) 2378 { 2379 #if defined(PETSC_HAVE_PARMETIS) 2380 PetscSF sf; 2381 PetscInt ierr, i, j; 2382 PetscInt vBegin, vEnd, nroots, nleafs, pStart, pEnd, sumDegrees; 2383 const PetscInt *degree, *ilocal; 2384 const PetscSFNode *iremote; /* Do I need to free these? */ 2385 PetscBool *toBalance, *isLeaf, *isExclusivelyOwned, *isNonExclusivelyOwned; 2386 PetscInt numExclusivelyOwned, numNonExclusivelyOwned; 2387 PetscMPIInt rank, size; 2388 PetscInt *numLocalVerticesAllProcesses, *cumSumVertices, *cumSumDegrees, *globalNumbersOfLocalOwnedVertices, *locationsOfLeafs, *rankOnLeafs, *remoteLocalPointOfLeafs, *points, *leafGlobalNumbers; 2389 PetscInt offset, counter; 2390 PetscInt lenadjncy, numNonExclusivelyOwnedConnectTo, numLeafs; 2391 PetscInt *xadj, *adjncy, *vtxwgt; 2392 PetscInt lenxadj; 2393 2394 PetscInt *adjwgt=NULL; 2395 PetscInt *part, *options; 2396 PetscInt nparts, wgtflag, numflag, ncon, edgecut; 2397 real_t *ubvec; 2398 PetscInt *firstVertices, *renumbering; 2399 PetscInt failed, failedGlobal; 2400 MPI_Comm comm; 2401 PetscInt leafCounter; 2402 PetscInt *leafsNew; 2403 PetscSFNode *leafLocationsNew; 2404 Mat A; 2405 2406 ierr = PetscLogEventBegin(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr); 2407 2408 ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr); 2409 2410 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 2411 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 2412 if(size==1) PetscFunctionReturn(0); 2413 2414 /* Figure out all points in the plex that we are interested in balancing. In 2415 * this case we are just interested in vertices. Should generalize that at 2416 * some point. */ 2417 ierr = DMPlexGetDepthStratum(dm, 0, &vBegin, &vEnd);CHKERRQ(ierr); 2418 ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr); 2419 ierr = PetscMalloc1(pEnd-pStart, &toBalance);CHKERRQ(ierr); 2420 2421 for(i=0; i<pEnd-pStart; i++) { 2422 toBalance[i] = (i-pStart>=vBegin && i-pStart<vEnd); 2423 } 2424 2425 2426 /* There are three types of points: 2427 * exclusivelyOwned: points that are owned by this process and only seen by this process 2428 * nonExclusivelyOwned: points that are owned by this process but seen by at least another process 2429 * leaf: a point that is seen by this process but owned by a different process 2430 */ 2431 ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr); 2432 ierr = PetscSFGetGraph(sf, &nroots, &nleafs, &ilocal, &iremote); CHKERRQ(ierr); 2433 ierr = PetscMalloc1(pEnd-pStart, &isLeaf);CHKERRQ(ierr); 2434 ierr = PetscMalloc1(pEnd-pStart, &isNonExclusivelyOwned);CHKERRQ(ierr); 2435 ierr = PetscMalloc1(pEnd-pStart, &isExclusivelyOwned);CHKERRQ(ierr); 2436 for(i=0; i<pEnd-pStart; i++) { 2437 isNonExclusivelyOwned[i] = PETSC_FALSE; 2438 isExclusivelyOwned[i] = PETSC_FALSE; 2439 isLeaf[i] = PETSC_FALSE; 2440 } 2441 2442 2443 /* start by marking all the leafs */ 2444 for(i=0; i<nleafs; i++) { 2445 isLeaf[ilocal[i]-pStart] = PETSC_TRUE; 2446 } 2447 2448 /* for an owned point, we can figure out whether another processor sees it or 2449 * not by calculating its degree */ 2450 ierr = PetscSFComputeDegreeBegin(sf, °ree);CHKERRQ(ierr); 2451 ierr = PetscSFComputeDegreeEnd(sf, °ree);CHKERRQ(ierr); 2452 2453 numExclusivelyOwned=0; 2454 numNonExclusivelyOwned=0; 2455 for(i=0; i<pEnd-pStart; i++) { 2456 if(toBalance[i]) { 2457 if(degree[i] > 0) { 2458 isNonExclusivelyOwned[i] = PETSC_TRUE; 2459 numNonExclusivelyOwned += 1; 2460 } else { 2461 if(!isLeaf[i]) { 2462 isExclusivelyOwned[i] = PETSC_TRUE; 2463 numExclusivelyOwned += 1; 2464 } 2465 } 2466 } 2467 } 2468 2469 2470 /* We are going to build a graph with one vertex per core representing the 2471 * exclusively owned points and then one vertex per nonExclusively owned 2472 * point. */ 2473 ierr = PetscMalloc1(size, &numLocalVerticesAllProcesses);CHKERRQ(ierr); 2474 numLocalVerticesAllProcesses[rank] = 1 + numNonExclusivelyOwned; 2475 /* For numbering purposes we need to figure out how many vertices are on each core. */ 2476 MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,numLocalVerticesAllProcesses,1,MPIU_INT,comm);CHKERRQ(ierr); 2477 2478 ierr = PetscMalloc1(size+1, &cumSumVertices);CHKERRQ(ierr); 2479 cumSumVertices[0] = 0; 2480 for(i=1; i<=size; i++) { 2481 cumSumVertices[i] = cumSumVertices[i-1] + numLocalVerticesAllProcesses[i-1]; 2482 } 2483 ierr = PetscFree(numLocalVerticesAllProcesses);CHKERRQ(ierr); 2484 ierr = PetscMalloc1(pEnd-pStart+1, &cumSumDegrees);CHKERRQ(ierr); 2485 cumSumDegrees[0] = 0; 2486 for(i=1; i<=pEnd-pStart; i++) { 2487 cumSumDegrees[i] = cumSumDegrees[i-1] + degree[i-1]; 2488 } 2489 ierr = PetscMalloc1(pEnd-pStart, &globalNumbersOfLocalOwnedVertices);CHKERRQ(ierr); 2490 2491 offset = cumSumVertices[rank]; 2492 counter = 0; 2493 for(i=0; i<pEnd-pStart; i++) { 2494 if(toBalance[i]) { 2495 if(degree[i] > 0) { 2496 globalNumbersOfLocalOwnedVertices[i] = counter + 1 + offset; 2497 counter++; 2498 } 2499 } 2500 } 2501 2502 /* get the location of my leafs (we have sumDegrees many leafs pointing at our roots) */ 2503 sumDegrees = 0; 2504 for(i=0; i<pEnd-pStart; i++) { 2505 sumDegrees += degree[i]; 2506 } 2507 2508 ierr = PetscMalloc1(sumDegrees, &locationsOfLeafs);CHKERRQ(ierr); 2509 ierr = PetscMalloc1(pEnd-pStart, &rankOnLeafs);CHKERRQ(ierr); 2510 for(i=0; i<pEnd-pStart; i++) { 2511 rankOnLeafs[i] = rank; 2512 } 2513 ierr = PetscSFGatherBegin(sf, MPIU_INT, rankOnLeafs, locationsOfLeafs);CHKERRQ(ierr); 2514 ierr = PetscSFGatherEnd(sf, MPIU_INT, rankOnLeafs, locationsOfLeafs);CHKERRQ(ierr); 2515 ierr = PetscFree(rankOnLeafs);CHKERRQ(ierr); 2516 2517 /* get the remote local points of my leaves */ 2518 ierr = PetscMalloc1(sumDegrees, &remoteLocalPointOfLeafs);CHKERRQ(ierr); 2519 ierr = PetscMalloc1(pEnd-pStart, &points);CHKERRQ(ierr); 2520 for(i=0; i<pEnd-pStart; i++) { 2521 points[i] = pStart+i; 2522 } 2523 ierr = PetscSFGatherBegin(sf, MPIU_INT, points, remoteLocalPointOfLeafs);CHKERRQ(ierr); 2524 ierr = PetscSFGatherEnd(sf, MPIU_INT, points, remoteLocalPointOfLeafs);CHKERRQ(ierr); 2525 ierr = PetscFree(points);CHKERRQ(ierr); 2526 2527 /* send the global numbers of vertices I own to the leafs so that they know to connect to it */ 2528 ierr = PetscMalloc1(pEnd-pStart, &leafGlobalNumbers);CHKERRQ(ierr); 2529 ierr = PetscSFBcastBegin(sf, MPIU_INT, globalNumbersOfLocalOwnedVertices, leafGlobalNumbers);CHKERRQ(ierr); 2530 ierr = PetscSFBcastEnd(sf, MPIU_INT, globalNumbersOfLocalOwnedVertices, leafGlobalNumbers);CHKERRQ(ierr); 2531 2532 /* Now start building the data structur for ParMETIS */ 2533 /* vtxdist = cum_sum_vertices */ 2534 2535 numNonExclusivelyOwnedConnectTo = 0; 2536 numLeafs = 0; 2537 for(i=0; i<pEnd-pStart; i++) { 2538 if(toBalance[i]) { 2539 if(isNonExclusivelyOwned[i]) { 2540 numNonExclusivelyOwnedConnectTo += 1 + degree[i]; 2541 } 2542 if(isLeaf[i]) { 2543 numLeafs++; 2544 } 2545 } 2546 } 2547 2548 2549 2550 ierr = MatCreate(comm, &A);CHKERRQ(ierr); 2551 ierr = MatSetType(A, MATMPIAIJ);CHKERRQ(ierr); 2552 ierr = MatSetSizes(A, 1+numNonExclusivelyOwned, 1+numNonExclusivelyOwned, PETSC_DETERMINE, PETSC_DETERMINE);CHKERRQ(ierr); 2553 ierr = MatSetUp(A);CHKERRQ(ierr); 2554 2555 for(i=0; i<pEnd-pStart; i++) { 2556 if(toBalance[i]) { 2557 if(isNonExclusivelyOwned[i]) { 2558 PetscInt adjSize = PETSC_DETERMINE; 2559 PetscInt *adj = NULL; 2560 ierr = MatSetValue(A, cumSumVertices[rank], globalNumbersOfLocalOwnedVertices[i], 1, INSERT_VALUES);CHKERRQ(ierr); 2561 ierr = MatSetValue(A, globalNumbersOfLocalOwnedVertices[i], cumSumVertices[rank], 1, INSERT_VALUES);CHKERRQ(ierr); 2562 ierr = DMPlexGetAdjacency(dm, i+pStart, &adjSize, &adj);CHKERRQ(ierr); 2563 adjSize = 0; 2564 for(j=0; j<adjSize; j++) { 2565 if(toBalance[adj[j]] && i+pStart != adj[j]) { 2566 if(isNonExclusivelyOwned[adj[j]]) { 2567 ierr = MatSetValue(A, globalNumbersOfLocalOwnedVertices[i], globalNumbersOfLocalOwnedVertices[adj[j]], 1, INSERT_VALUES);CHKERRQ(ierr); 2568 ierr = MatSetValue(A, globalNumbersOfLocalOwnedVertices[adj[j]], globalNumbersOfLocalOwnedVertices[i], 1, INSERT_VALUES);CHKERRQ(ierr); 2569 } 2570 else if(isLeaf[adj[j]]) { 2571 ierr = MatSetValue(A, globalNumbersOfLocalOwnedVertices[i], leafGlobalNumbers[adj[j]], 1, INSERT_VALUES);CHKERRQ(ierr); 2572 ierr = MatSetValue(A, leafGlobalNumbers[adj[j]], globalNumbersOfLocalOwnedVertices[i], 1, INSERT_VALUES);CHKERRQ(ierr); 2573 } 2574 } 2575 } 2576 ierr = PetscFree(adj);CHKERRQ(ierr); 2577 } else if (isLeaf[i]) { 2578 PetscInt adjSize = PETSC_DETERMINE; 2579 PetscInt *adj = NULL; 2580 ierr = MatSetValue(A, cumSumVertices[rank], leafGlobalNumbers[i], 1, INSERT_VALUES);CHKERRQ(ierr); 2581 ierr = MatSetValue(A, leafGlobalNumbers[i], cumSumVertices[rank], 1, INSERT_VALUES);CHKERRQ(ierr); 2582 ierr = DMPlexGetAdjacency(dm, i+pStart, &adjSize, &adj);CHKERRQ(ierr); 2583 adjSize = 0; 2584 for(j=0; j<adjSize; j++) { 2585 if(toBalance[adj[j]] && i+pStart != adj[j]) { 2586 if(isNonExclusivelyOwned[adj[j]]) { 2587 ierr = MatSetValue(A, leafGlobalNumbers[i], globalNumbersOfLocalOwnedVertices[adj[j]], 1, INSERT_VALUES);CHKERRQ(ierr); 2588 ierr = MatSetValue(A, globalNumbersOfLocalOwnedVertices[adj[j]], leafGlobalNumbers[i], 1, INSERT_VALUES);CHKERRQ(ierr); 2589 } else if(isLeaf[adj[j]]) { 2590 ierr = MatSetValue(A, leafGlobalNumbers[i], leafGlobalNumbers[adj[j]], 1, INSERT_VALUES);CHKERRQ(ierr); 2591 ierr = MatSetValue(A, leafGlobalNumbers[adj[j]], leafGlobalNumbers[i], 1, INSERT_VALUES);CHKERRQ(ierr); 2592 } 2593 } 2594 } 2595 ierr = PetscFree(adj);CHKERRQ(ierr); 2596 } 2597 } 2598 } 2599 ierr = PetscFree(leafGlobalNumbers);CHKERRQ(ierr); 2600 ierr = PetscFree(globalNumbersOfLocalOwnedVertices);CHKERRQ(ierr); 2601 2602 ierr = MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 2603 ierr = MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 2604 /*MatView(A, PETSC_VIEWER_STDOUT_WORLD);*/ 2605 nparts = size; 2606 wgtflag = 2; 2607 2608 numflag = 0; 2609 ncon = 2; 2610 real_t *tpwgts; 2611 ierr = PetscMalloc1(ncon * nparts, &tpwgts);CHKERRQ(ierr); 2612 2613 for(i=0; i<ncon*nparts; i++) { 2614 tpwgts[i] = 1./(nparts); 2615 } 2616 2617 2618 ierr = PetscMalloc1(ncon, &ubvec);CHKERRQ(ierr); 2619 ubvec[0] = 1.01; 2620 if(ncon>1) ubvec[1] = 1.01; 2621 ierr = PetscMalloc1(1, &options);CHKERRQ(ierr); 2622 options[0] = 0; 2623 lenadjncy = 0; 2624 for(i=0; i<1+numNonExclusivelyOwned; i++) { 2625 PetscInt temp=0; 2626 ierr = MatGetRow(A, cumSumVertices[rank] + i, &temp, NULL, NULL);CHKERRQ(ierr); 2627 lenadjncy += temp; 2628 ierr = MatRestoreRow(A, cumSumVertices[rank] + i, &temp, NULL, NULL);CHKERRQ(ierr); 2629 } 2630 ierr = PetscMalloc1(lenadjncy, &adjncy);CHKERRQ(ierr); 2631 lenxadj = 2 + numNonExclusivelyOwned; 2632 ierr = PetscMalloc1(lenxadj, &xadj);CHKERRQ(ierr); 2633 xadj[0] = 0; 2634 counter = 0; 2635 for(i=0; i<1+numNonExclusivelyOwned; i++) { 2636 PetscInt temp=0, *cols; 2637 ierr = MatGetRow(A, cumSumVertices[rank] + i, &temp, &cols, NULL);CHKERRQ(ierr); 2638 for(j=0; j<temp; j++) { 2639 adjncy[counter+j] = cols[j]; 2640 } 2641 counter += temp; 2642 xadj[i+1] = counter; 2643 ierr = MatRestoreRow(A, cumSumVertices[rank] + i, &temp, &cols, NULL);CHKERRQ(ierr); 2644 } 2645 2646 2647 ierr = PetscMalloc1(cumSumVertices[rank+1]-cumSumVertices[rank], &part);CHKERRQ(ierr); 2648 ierr = PetscMalloc1(ncon*(1 + numNonExclusivelyOwned), &vtxwgt);CHKERRQ(ierr); 2649 vtxwgt[0] = numExclusivelyOwned; 2650 if(ncon>1) vtxwgt[1] = 1; 2651 for(i=0; i<numNonExclusivelyOwned; i++) { 2652 vtxwgt[ncon*(i+1)] = 1; 2653 if(ncon>1) vtxwgt[ncon*(i+1)+1] = 0; 2654 } 2655 2656 PetscPrintf(comm, "Graph size: %i\n", cumSumVertices[size]); 2657 if(parallel) { 2658 if(useInitialGuess) { 2659 PetscStackPush("ParMETIS_V3_RefineKway"); 2660 ierr = ParMETIS_V3_RefineKway(cumSumVertices, xadj, adjncy, vtxwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgecut, part, &comm); 2661 PetscStackPop; 2662 } else { 2663 PetscStackPush("ParMETIS_V3_PartKway"); 2664 ierr = ParMETIS_V3_PartKway(cumSumVertices, xadj, adjncy, vtxwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgecut, part, &comm); 2665 PetscStackPop; 2666 } 2667 } else { 2668 Mat As; 2669 PetscInt numRows; 2670 PetscInt *partGlobal; 2671 ierr = MatCreateRedundantMatrix(A, size, MPI_COMM_NULL, MAT_INITIAL_MATRIX, &As);CHKERRQ(ierr); 2672 2673 PetscInt *numExclusivelyOwnedAll; 2674 ierr = PetscMalloc1(size, &numExclusivelyOwnedAll);CHKERRQ(ierr); 2675 numExclusivelyOwnedAll[rank] = numExclusivelyOwned; 2676 MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,numExclusivelyOwnedAll,1,MPIU_INT,comm); 2677 2678 ierr = MatGetSize(As, &numRows, NULL);CHKERRQ(ierr); 2679 ierr = PetscMalloc1(numRows, &partGlobal);CHKERRQ(ierr); 2680 if(rank == 0) { 2681 PetscInt *adjncy_g, *xadj_g, *vtxwgt_g; 2682 lenadjncy = 0; 2683 2684 for(i=0; i<numRows; i++) { 2685 PetscInt temp=0; 2686 ierr = MatGetRow(As, i, &temp, NULL, NULL);CHKERRQ(ierr); 2687 lenadjncy += temp; 2688 ierr = MatRestoreRow(As, i, &temp, NULL, NULL);CHKERRQ(ierr); 2689 } 2690 ierr = PetscMalloc1(lenadjncy, &adjncy_g);CHKERRQ(ierr); 2691 lenxadj = 1 + numRows; 2692 ierr = PetscMalloc1(lenxadj, &xadj_g);CHKERRQ(ierr); 2693 xadj_g[0] = 0; 2694 counter = 0; 2695 for(i=0; i<numRows; i++) { 2696 PetscInt temp=0, *cols; 2697 ierr = MatGetRow(As, i, &temp, &cols, NULL);CHKERRQ(ierr); 2698 for(j=0; j<temp; j++) { 2699 adjncy_g[counter+j] = cols[j]; 2700 } 2701 counter += temp; 2702 xadj_g[i+1] = counter; 2703 ierr = MatRestoreRow(As, i, &temp, &cols, NULL);CHKERRQ(ierr); 2704 } 2705 /*METIS_PartGraphKway(&numRows, &ncon, xadj_g, adjncy_g, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &edgecut, part);*/ 2706 ierr = PetscMalloc1(2*numRows, &vtxwgt_g);CHKERRQ(ierr); 2707 for(i=0; i<size; i++){ 2708 vtxwgt_g[ncon*cumSumVertices[i]] = numExclusivelyOwnedAll[i]; 2709 if(ncon>1) vtxwgt_g[ncon*cumSumVertices[i]+1] = 1; 2710 for(j=cumSumVertices[i]+1; j<cumSumVertices[i+1]; j++) { 2711 vtxwgt_g[ncon*j] = 1; 2712 if(ncon>1) vtxwgt_g[2*j+1] = 0; 2713 } 2714 } 2715 ierr = PetscMalloc1(64, &options);CHKERRQ(ierr); 2716 ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */ 2717 options[METIS_OPTION_CONTIG] = 1; 2718 PetscStackPush("METIS_PartGraphKway"); 2719 METIS_PartGraphKway(&numRows, &ncon, xadj_g, adjncy_g, vtxwgt_g, NULL, NULL, &nparts, tpwgts, ubvec, options, &edgecut, partGlobal); 2720 PetscStackPop; 2721 PetscInt *distribution; 2722 ierr = PetscCalloc1(size, &distribution);CHKERRQ(ierr); 2723 for(i=0; i<numRows; i++) { 2724 distribution[partGlobal[i]] += vtxwgt_g[ncon*i]; 2725 } 2726 ierr = PetscIntView(size, distribution, PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr); 2727 ierr = PetscFree(distribution);CHKERRQ(ierr); 2728 ierr = PetscFree(xadj_g);CHKERRQ(ierr); 2729 ierr = PetscFree(adjncy_g);CHKERRQ(ierr); 2730 ierr = PetscFree(vtxwgt_g);CHKERRQ(ierr); 2731 } 2732 ierr = PetscFree(numExclusivelyOwnedAll);CHKERRQ(ierr); 2733 2734 MPI_Bcast(partGlobal,numRows,MPIU_INT,0, comm); 2735 for(i=cumSumVertices[rank]; i<cumSumVertices[rank+1]; i++) { 2736 part[i-cumSumVertices[rank]] = partGlobal[i]; 2737 } 2738 ierr = PetscFree(partGlobal);CHKERRQ(ierr); 2739 } 2740 2741 ierr = PetscFree(ubvec);CHKERRQ(ierr); 2742 ierr = PetscFree(tpwgts);CHKERRQ(ierr); 2743 ierr = PetscFree(options);CHKERRQ(ierr); 2744 2745 2746 /* Now rename the result so that the vertex resembling the exclusively owned points stays on the same rank */ 2747 2748 ierr = PetscMalloc1(size, &firstVertices);CHKERRQ(ierr); 2749 ierr = PetscMalloc1(size, &renumbering);CHKERRQ(ierr); 2750 firstVertices[rank] = part[0]; 2751 MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,firstVertices,1,MPIU_INT,comm); 2752 for(i=0; i<size; i++) { 2753 renumbering[firstVertices[i]] = i; 2754 } 2755 for(i=0; i<cumSumVertices[rank+1]-cumSumVertices[rank]; i++) { 2756 part[i] = renumbering[part[i]]; 2757 } 2758 /* Check if the renumbering worked (this can fail when ParMETIS gives fewer partitions than there are processes) */ 2759 failed = (PetscInt)(part[0] != rank); 2760 MPI_Allreduce(&failed, &failedGlobal, 1, MPIU_INT, MPI_SUM, comm); 2761 2762 if(failedGlobal > 0) { 2763 PetscFunctionReturn(1); 2764 } 2765 2766 2767 /*Let's check how well we did distributing points*/ 2768 { 2769 PetscInt *distribution, *distribution_before, min, max, min_before, max_before; 2770 ierr = PetscCalloc1(size, &distribution);CHKERRQ(ierr); 2771 ierr = PetscCalloc1(size, &distribution_before);CHKERRQ(ierr); 2772 for(i=0; i<cumSumVertices[rank+1]-cumSumVertices[rank]; i++) { 2773 distribution[part[i]] += vtxwgt[ncon*i]; 2774 distribution_before[rank] += vtxwgt[ncon*i]; 2775 } 2776 MPI_Allreduce(MPI_IN_PLACE, distribution, size, MPIU_INT, MPI_SUM, comm); 2777 MPI_Allreduce(MPI_IN_PLACE, distribution_before, size, MPIU_INT, MPI_SUM, comm); 2778 min = distribution[0]; 2779 max = distribution[0]; 2780 min_before = distribution_before[0]; 2781 max_before = distribution_before[0]; 2782 for(i=1; i<size; i++) { 2783 if(distribution[i]<min) min=distribution[i]; 2784 if(distribution[i]>max) max=distribution[i]; 2785 if(distribution_before[i]<min_before) min_before=distribution_before[i]; 2786 if(distribution_before[i]>max_before) max_before=distribution_before[i]; 2787 } 2788 PetscPrintf(comm, "Before. Min: %i, Max: %i, Ratio: %f\n", min_before, max_before, (max_before*1.)/min_before); 2789 PetscPrintf(comm, "Rebalance. Min: %i, Max: %i, Ratio: %f\n", min, max, (max*1.)/min); 2790 ierr = PetscFree(distribution);CHKERRQ(ierr); 2791 ierr = PetscFree(distribution_before);CHKERRQ(ierr); 2792 } 2793 2794 /* Now check that every vertex is owned by a process that it is actually connected to. */ 2795 for(i=1; i<=numNonExclusivelyOwned; i++) { 2796 PetscInt loc = 0; 2797 ierr = PetscFindInt(cumSumVertices[part[i]], xadj[i+1]-xadj[i], &adjncy[xadj[i]], &loc);CHKERRQ(ierr); 2798 /* If not, then just set the owner to the original owner (hopefully a rare event, it means that a vertex has been isolated) */ 2799 if(loc<0) { 2800 part[i] = rank; 2801 } 2802 } 2803 2804 /* Let's see how significant the influences of the previous fixing up step was.*/ 2805 { 2806 PetscInt *distribution, min, max; 2807 ierr = PetscCalloc1(size, &distribution);CHKERRQ(ierr); 2808 for(i=0; i<cumSumVertices[rank+1]-cumSumVertices[rank]; i++) { 2809 distribution[part[i]] += vtxwgt[ncon*i]; 2810 } 2811 MPI_Allreduce(MPI_IN_PLACE, distribution, size, MPIU_INT, MPI_SUM, comm); 2812 min = distribution[0]; 2813 max = distribution[0]; 2814 for(i=1; i<size; i++) { 2815 if(distribution[i]<min) min=distribution[i]; 2816 if(distribution[i]>max) max=distribution[i]; 2817 } 2818 PetscPrintf(comm, "Fixed. Min: %i, Max: %i, Ratio: %f\n", min, max, (max*1.)/min); 2819 ierr = PetscFree(distribution);CHKERRQ(ierr); 2820 } 2821 2822 ierr = PetscFree(cumSumVertices);CHKERRQ(ierr); 2823 ierr = PetscFree(firstVertices);CHKERRQ(ierr); 2824 ierr = PetscFree(renumbering);CHKERRQ(ierr); 2825 ierr = PetscFree(xadj);CHKERRQ(ierr); 2826 ierr = PetscFree(adjncy);CHKERRQ(ierr); 2827 ierr = PetscFree(vtxwgt);CHKERRQ(ierr); 2828 2829 2830 /* Figure out the new owners of the vertices that are up for grabs and their numbers on the new owners */ 2831 PetscInt *newOwners, *newNumbers; 2832 ierr = PetscMalloc1(pEnd-pStart, &newOwners);CHKERRQ(ierr); 2833 ierr = PetscMalloc1(pEnd-pStart, &newNumbers);CHKERRQ(ierr); 2834 for(i=0; i<pEnd-pStart; i++) { 2835 newOwners[i] = -1; 2836 newNumbers[i] = -1; 2837 } 2838 { 2839 PetscInt oldNumber, newNumber, oldOwner, newOwner; 2840 counter = 1; 2841 for(i=0; i<pEnd-pStart; i++) { 2842 if(toBalance[i]) { 2843 if(isNonExclusivelyOwned[i]) { 2844 oldNumber = i; 2845 newNumber = -1; 2846 oldOwner = rank; 2847 newOwner = part[counter]; 2848 if(oldOwner == newOwner) { 2849 newNumber = oldNumber; 2850 } else { 2851 for(j=0; j<degree[i]; j++) { 2852 if(locationsOfLeafs[cumSumDegrees[oldNumber]+j] == newOwner) { 2853 newNumber = remoteLocalPointOfLeafs[cumSumDegrees[oldNumber]+j]; 2854 break; 2855 } 2856 } 2857 } 2858 if(newNumber == -1) { 2859 SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Couldn't find the new owner of vertex."); 2860 } 2861 newOwners[oldNumber] = newOwner; 2862 newNumbers[oldNumber] = newNumber; 2863 counter++; 2864 } 2865 } 2866 } 2867 } 2868 ierr = PetscFree(cumSumDegrees);CHKERRQ(ierr); 2869 ierr = PetscFree(locationsOfLeafs);CHKERRQ(ierr); 2870 ierr = PetscFree(remoteLocalPointOfLeafs);CHKERRQ(ierr); 2871 2872 PetscSFBcastBegin(sf, MPIU_INT, newOwners, newOwners); 2873 PetscSFBcastEnd(sf, MPIU_INT, newOwners, newOwners); 2874 PetscSFBcastBegin(sf, MPIU_INT, newNumbers, newNumbers); 2875 PetscSFBcastEnd(sf, MPIU_INT, newNumbers, newNumbers); 2876 2877 /* Now count how many leafs we have on each processor. */ 2878 leafCounter=0; 2879 for(i=0; i<pEnd-pStart; i++) { 2880 if(toBalance[i]){ 2881 if(newOwners[i] >= 0 && newOwners[i] != rank) { 2882 leafCounter++; 2883 } 2884 } else { 2885 if(isLeaf[i]) { 2886 leafCounter++; 2887 } 2888 } 2889 } 2890 2891 /* Now set up the new sf by creating the leaf arrays */ 2892 ierr = PetscMalloc1(leafCounter, &leafsNew);CHKERRQ(ierr); 2893 ierr = PetscMalloc1(leafCounter, &leafLocationsNew);CHKERRQ(ierr); 2894 2895 counter = 0; 2896 leafCounter = 0; 2897 for(i=0; i<pEnd-pStart; i++) { 2898 if(toBalance[i]){ 2899 if(isLeaf[i]) { 2900 counter++; 2901 } 2902 if(newOwners[i] >= 0 && newOwners[i] != rank) { 2903 leafsNew[leafCounter] = i; 2904 leafLocationsNew[leafCounter].rank = newOwners[i]; 2905 leafLocationsNew[leafCounter].index = newNumbers[i]; 2906 leafCounter++; 2907 } 2908 } else { 2909 if(isLeaf[i]) { 2910 leafsNew[leafCounter] = i; 2911 leafLocationsNew[leafCounter].rank = iremote[counter].rank; 2912 leafLocationsNew[leafCounter].index = iremote[counter].index; 2913 leafCounter++; 2914 counter++; 2915 } 2916 } 2917 } 2918 2919 ierr = PetscSFSetGraph(sf, nroots, leafCounter, leafsNew, PETSC_OWN_POINTER, leafLocationsNew, PETSC_OWN_POINTER);CHKERRQ(ierr); 2920 2921 2922 ierr = PetscFree(toBalance);CHKERRQ(ierr); 2923 ierr = PetscFree(isLeaf);CHKERRQ(ierr); 2924 ierr = PetscFree(isNonExclusivelyOwned);CHKERRQ(ierr); 2925 ierr = PetscFree(isExclusivelyOwned);CHKERRQ(ierr); 2926 ierr = PetscFree(newOwners);CHKERRQ(ierr); 2927 ierr = PetscFree(newNumbers);CHKERRQ(ierr); 2928 ierr = PetscLogEventEnd(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr); 2929 #else 2930 SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis."); 2931 #endif 2932 PetscFunctionReturn(0); 2933 } 2934