1 #include <petsc-private/dmpleximpl.h> /*I "petscdmplex.h" I*/ 2 3 PetscClassId PETSCPARTITIONER_CLASSID = 0; 4 5 PetscFunctionList PetscPartitionerList = NULL; 6 PetscBool PetscPartitionerRegisterAllCalled = PETSC_FALSE; 7 8 PetscBool ChacoPartitionercite = PETSC_FALSE; 9 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n" 10 " author = {Bruce Hendrickson and Robert Leland},\n" 11 " title = {A multilevel algorithm for partitioning graphs},\n" 12 " booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)}," 13 " isbn = {0-89791-816-9},\n" 14 " pages = {28},\n" 15 " doi = {http://doi.acm.org/10.1145/224170.224228},\n" 16 " publisher = {ACM Press},\n" 17 " address = {New York},\n" 18 " year = {1995}\n}\n"; 19 20 PetscBool ParMetisPartitionercite = PETSC_FALSE; 21 const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n" 22 " author = {George Karypis and Vipin Kumar},\n" 23 " title = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n" 24 " journal = {Journal of Parallel and Distributed Computing},\n" 25 " volume = {48},\n" 26 " pages = {71--85},\n" 27 " year = {1998}\n}\n"; 28 29 #undef __FUNCT__ 30 #define __FUNCT__ "DMPlexCreatePartitionerGraph" 31 /*@C 32 DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner 33 34 Input Parameters: 35 + dm - The mesh DM dm 36 - height - Height of the strata from which to construct the graph 37 38 Output Parameter: 39 + numVertices - Number of vertices in the graph 40 - offsets - Point offsets in the graph 41 - adjacency - Point connectivity in the graph 42 43 The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and 44 DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function 45 representation on the mesh. 
46 47 Level: developer 48 49 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure() 50 @*/ 51 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency) 52 { 53 PetscInt p, pStart, pEnd, a, adjSize, size; 54 PetscInt *adj = NULL, *vOffsets = NULL; 55 PetscBool useCone, useClosure; 56 PetscSection section; 57 PetscSegBuffer adjBuffer; 58 PetscMPIInt rank; 59 PetscErrorCode ierr; 60 61 PetscFunctionBegin; 62 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr); 63 ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr); 64 *numVertices = pEnd - pStart; 65 /* Build adjacency graph via a section/segbuffer */ 66 ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), §ion);CHKERRQ(ierr); 67 ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr); 68 ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr); 69 /* Always use FVM adjacency to create partitioner graph */ 70 ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr); 71 ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr); 72 ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr); 73 ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr); 74 for (p = pStart; p < pEnd; p++) { 75 adjSize = PETSC_DETERMINE; 76 ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr); 77 for (a = 0; a < adjSize; ++a) { 78 const PetscInt point = adj[a]; 79 if (point != p && pStart <= point && point < pEnd) { 80 PetscInt *PETSC_RESTRICT pBuf; 81 ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr); 82 ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr); 83 *pBuf = point; 84 } 85 } 86 } 87 ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr); 88 ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr); 89 /* Derive CSR graph from section/segbuffer */ 
90 ierr = PetscSectionSetUp(section);CHKERRQ(ierr); 91 ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr); 92 ierr = PetscMalloc2(*numVertices+1, &vOffsets, size, adjacency);CHKERRQ(ierr); 93 for (p = pStart; p < pEnd; p++) {ierr = PetscSectionGetOffset(section, p, &(vOffsets[p]));CHKERRQ(ierr);} 94 vOffsets[*numVertices] = size; 95 if (offsets) *offsets = vOffsets; 96 if (adjacency) {ierr = PetscSegBufferExtractTo(adjBuffer, *adjacency);CHKERRQ(ierr);} 97 /* Clean up */ 98 ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr); 99 ierr = PetscSectionDestroy(§ion);CHKERRQ(ierr); 100 ierr = PetscFree(adj);CHKERRQ(ierr); 101 PetscFunctionReturn(0); 102 } 103 104 #undef __FUNCT__ 105 #define __FUNCT__ "DMPlexCreateNeighborCSR" 106 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency) 107 { 108 const PetscInt maxFaceCases = 30; 109 PetscInt numFaceCases = 0; 110 PetscInt numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */ 111 PetscInt *off, *adj; 112 PetscInt *neighborCells = NULL; 113 PetscInt dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell; 114 PetscErrorCode ierr; 115 116 PetscFunctionBegin; 117 /* For parallel partitioning, I think you have to communicate supports */ 118 ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr); 119 cellDim = dim - cellHeight; 120 ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr); 121 ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr); 122 if (cEnd - cStart == 0) { 123 if (numVertices) *numVertices = 0; 124 if (offsets) *offsets = NULL; 125 if (adjacency) *adjacency = NULL; 126 PetscFunctionReturn(0); 127 } 128 numCells = cEnd - cStart; 129 faceDepth = depth - cellHeight; 130 if (dim == depth) { 131 PetscInt f, fStart, fEnd; 132 133 ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr); 134 /* Count neighboring cells */ 135 ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr); 
136 for (f = fStart; f < fEnd; ++f) { 137 const PetscInt *support; 138 PetscInt supportSize; 139 ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr); 140 ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr); 141 if (supportSize == 2) { 142 PetscInt numChildren; 143 144 ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr); 145 if (!numChildren) { 146 ++off[support[0]-cStart+1]; 147 ++off[support[1]-cStart+1]; 148 } 149 } 150 } 151 /* Prefix sum */ 152 for (c = 1; c <= numCells; ++c) off[c] += off[c-1]; 153 if (adjacency) { 154 PetscInt *tmp; 155 156 ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr); 157 ierr = PetscMalloc1((numCells+1), &tmp);CHKERRQ(ierr); 158 ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr); 159 /* Get neighboring cells */ 160 for (f = fStart; f < fEnd; ++f) { 161 const PetscInt *support; 162 PetscInt supportSize; 163 ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr); 164 ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr); 165 if (supportSize == 2) { 166 PetscInt numChildren; 167 168 ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr); 169 if (!numChildren) { 170 adj[tmp[support[0]-cStart]++] = support[1]; 171 adj[tmp[support[1]-cStart]++] = support[0]; 172 } 173 } 174 } 175 #if defined(PETSC_USE_DEBUG) 176 for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart); 177 #endif 178 ierr = PetscFree(tmp);CHKERRQ(ierr); 179 } 180 if (numVertices) *numVertices = numCells; 181 if (offsets) *offsets = off; 182 if (adjacency) *adjacency = adj; 183 PetscFunctionReturn(0); 184 } 185 /* Setup face recognition */ 186 if (faceDepth == 1) { 187 PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */ 188 189 for (c = cStart; c < cEnd; ++c) { 190 PetscInt corners; 191 192 ierr = DMPlexGetConeSize(dm, c, 
&corners);CHKERRQ(ierr); 193 if (!cornersSeen[corners]) { 194 PetscInt nFV; 195 196 if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases"); 197 cornersSeen[corners] = 1; 198 199 ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr); 200 201 numFaceVertices[numFaceCases++] = nFV; 202 } 203 } 204 } 205 ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr); 206 /* Count neighboring cells */ 207 for (cell = cStart; cell < cEnd; ++cell) { 208 PetscInt numNeighbors = PETSC_DETERMINE, n; 209 210 ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr); 211 /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */ 212 for (n = 0; n < numNeighbors; ++n) { 213 PetscInt cellPair[2]; 214 PetscBool found = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE; 215 PetscInt meetSize = 0; 216 const PetscInt *meet = NULL; 217 218 cellPair[0] = cell; cellPair[1] = neighborCells[n]; 219 if (cellPair[0] == cellPair[1]) continue; 220 if (!found) { 221 ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr); 222 if (meetSize) { 223 PetscInt f; 224 225 for (f = 0; f < numFaceCases; ++f) { 226 if (numFaceVertices[f] == meetSize) { 227 found = PETSC_TRUE; 228 break; 229 } 230 } 231 } 232 ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr); 233 } 234 if (found) ++off[cell-cStart+1]; 235 } 236 } 237 /* Prefix sum */ 238 for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1]; 239 240 if (adjacency) { 241 ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr); 242 /* Get neighboring cells */ 243 for (cell = cStart; cell < cEnd; ++cell) { 244 PetscInt numNeighbors = PETSC_DETERMINE, n; 245 PetscInt cellOffset = 0; 246 247 ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, 
&neighborCells);CHKERRQ(ierr); 248 /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */ 249 for (n = 0; n < numNeighbors; ++n) { 250 PetscInt cellPair[2]; 251 PetscBool found = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE; 252 PetscInt meetSize = 0; 253 const PetscInt *meet = NULL; 254 255 cellPair[0] = cell; cellPair[1] = neighborCells[n]; 256 if (cellPair[0] == cellPair[1]) continue; 257 if (!found) { 258 ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr); 259 if (meetSize) { 260 PetscInt f; 261 262 for (f = 0; f < numFaceCases; ++f) { 263 if (numFaceVertices[f] == meetSize) { 264 found = PETSC_TRUE; 265 break; 266 } 267 } 268 } 269 ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr); 270 } 271 if (found) { 272 adj[off[cell-cStart]+cellOffset] = neighborCells[n]; 273 ++cellOffset; 274 } 275 } 276 } 277 } 278 ierr = PetscFree(neighborCells);CHKERRQ(ierr); 279 if (numVertices) *numVertices = numCells; 280 if (offsets) *offsets = off; 281 if (adjacency) *adjacency = adj; 282 PetscFunctionReturn(0); 283 } 284 285 #undef __FUNCT__ 286 #define __FUNCT__ "DMPlexEnlargePartition" 287 /* Expand the partition by BFS on the adjacency graph */ 288 PetscErrorCode DMPlexEnlargePartition(DM dm, const PetscInt start[], const PetscInt adjacency[], PetscSection origPartSection, IS origPartition, PetscSection partSection, IS *partition) 289 { 290 PetscHashI h; 291 const PetscInt *points; 292 PetscInt **tmpPoints, *newPoints, totPoints = 0; 293 PetscInt pStart, pEnd, part, q; 294 PetscBool useCone; 295 PetscErrorCode ierr; 296 297 PetscFunctionBegin; 298 PetscHashICreate(h); 299 ierr = PetscSectionGetChart(origPartSection, &pStart, &pEnd);CHKERRQ(ierr); 300 ierr = PetscSectionSetChart(partSection, pStart, pEnd);CHKERRQ(ierr); 301 ierr = ISGetIndices(origPartition, &points);CHKERRQ(ierr); 302 ierr = PetscMalloc1((pEnd - pStart), &tmpPoints);CHKERRQ(ierr); 303 ierr = DMPlexGetAdjacencyUseCone(dm, 
&useCone);CHKERRQ(ierr);
  /* BFS uses cone adjacency; save and restore the user's setting */
  ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
  for (part = pStart; part < pEnd; ++part) {
    PetscInt *adj = NULL;
    PetscInt  numPoints, nP, numNewPoints, off, p, n = 0;

    PetscHashIClear(h);
    ierr = PetscSectionGetDof(origPartSection, part, &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(origPartSection, part, &off);CHKERRQ(ierr);
    /* Add all existing points to h */
    for (p = 0; p < numPoints; ++p) {
      const PetscInt point = points[off+p];
      PetscHashIAdd(h, point, 1);
    }
    PetscHashISize(h, nP);
    if (nP != numPoints) SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONG, "Invalid partition has %d points, but only %d were unique", numPoints, nP);
    /* Add all points in next BFS level */
    for (p = 0; p < numPoints; ++p) {
      const PetscInt point   = points[off+p];
      PetscInt       adjSize = PETSC_DETERMINE, a;

      ierr = DMPlexGetAdjacency(dm, point, &adjSize, &adj);CHKERRQ(ierr);
      for (a = 0; a < adjSize; ++a) PetscHashIAdd(h, adj[a], 1);
    }
    /* The hash now holds the original points plus their one-ring neighborhood, deduplicated */
    PetscHashISize(h, numNewPoints);
    ierr = PetscSectionSetDof(partSection, part, numNewPoints);CHKERRQ(ierr);
    /* NOTE(review): tmpPoints is indexed by the raw chart point 'part'; this assumes pStart == 0 -- confirm */
    ierr = PetscMalloc1(numNewPoints, &tmpPoints[part]);CHKERRQ(ierr);
    ierr = PetscHashIGetKeys(h, &n, tmpPoints[part]);CHKERRQ(ierr);
    ierr = PetscFree(adj);CHKERRQ(ierr);
    totPoints += numNewPoints;
  }
  ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
  ierr = ISRestoreIndices(origPartition, &points);CHKERRQ(ierr);
  PetscHashIDestroy(h);
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* Concatenate the per-partition point lists into one array, in partition order */
  ierr = PetscMalloc1(totPoints, &newPoints);CHKERRQ(ierr);
  for (part = pStart, q = 0; part < pEnd; ++part) {
    PetscInt numPoints, p;

    ierr = PetscSectionGetDof(partSection, part, &numPoints);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p, ++q) newPoints[q] = tmpPoints[part][p];
    ierr = PetscFree(tmpPoints[part]);CHKERRQ(ierr);
  }
  ierr = PetscFree(tmpPoints);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), totPoints, newPoints, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerRegister"
/*@C
  PetscPartitionerRegister - Adds a new PetscPartitioner implementation

  Not Collective

  Input Parameters:
+ name        - The name of a new user-defined creation routine
- create_func - The creation routine itself

  Notes:
  PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners

  Sample usage:
.vb
    PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
.ve

  Then, your PetscPartitioner type can be chosen with the procedural interface via
.vb
    PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
    PetscPartitionerSetType(PetscPartitioner, "my_part");
.ve
  or at runtime via the option
.vb
    -petscpartitioner_type my_part
.ve

  Level: advanced

.keywords: PetscPartitioner, register
.seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()

@*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerSetType"
/*@C
  PetscPartitionerSetType - Builds a particular PetscPartitioner

  Collective on PetscPartitioner

  Input Parameters:
+ part - The PetscPartitioner object
- name - The kind of partitioner

  Options Database Key:
.
-petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types

  Level: intermediate

.keywords: PetscPartitioner, set, type
.seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
{
  PetscErrorCode (*r)(PetscPartitioner);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* No-op when the object already has the requested type */
  ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  if (!PetscPartitionerRegisterAllCalled) {ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);}
  ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);

  /* Tear down the previous implementation before the new creation routine installs its own ops */
  if (part->ops->destroy) {
    ierr = (*part->ops->destroy)(part);CHKERRQ(ierr);
    part->ops->destroy = NULL;
  }
  ierr = (*r)(part);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerGetType"
/*@C
  PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.

  Not Collective

  Input Parameter:
. part - The PetscPartitioner

  Output Parameter:
. name - The PetscPartitioner type name

  Level: intermediate

.keywords: PetscPartitioner, get, type, name
.seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  if (!PetscPartitionerRegisterAllCalled) {ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);}
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerView"
/*@C
  PetscPartitionerView - Views a PetscPartitioner

  Collective on PetscPartitioner

  Input Parameters:
+ part - the PetscPartitioner object to view
- v    - the viewer

  Level: developer

.seealso: PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Default to stdout on the object's communicator when no viewer is supplied */
  if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
  if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerViewFromOptions"
/*
  PetscPartitionerViewFromOptions - Processes command line options to determine if/how a PetscPartitioner is to be viewed.

  Collective on PetscPartitioner

  Input Parameters:
+ part - the PetscPartitioner
.
prefix - prefix to use for viewing, or NULL
- optionname - option to activate viewing

  Level: intermediate

.keywords: PetscPartitioner, view, options, database
.seealso: VecViewFromOptions(), MatViewFromOptions()
*/
PetscErrorCode PetscPartitionerViewFromOptions(PetscPartitioner part, const char prefix[], const char optionname[])
{
  PetscViewer       viewer;
  PetscViewerFormat format;
  PetscBool         flg;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  /* Use the explicit prefix when given, otherwise fall back to the object's options prefix */
  if (prefix) {ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject) part), prefix, optionname, &viewer, &format, &flg);CHKERRQ(ierr);}
  else        {ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject) part), ((PetscObject) part)->prefix, optionname, &viewer, &format, &flg);CHKERRQ(ierr);}
  if (flg) {
    ierr = PetscViewerPushFormat(viewer, format);CHKERRQ(ierr);
    ierr = PetscPartitionerView(part, viewer);CHKERRQ(ierr);
    ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
    ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerSetTypeFromOptions_Internal"
/* Choose the partitioner type from -petscpartitioner_type, defaulting to Chaco for untyped objects */
PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
{
  const char    *defaultType;
  char           name[256];
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  if (!((PetscObject) part)->type_name) defaultType = PETSCPARTITIONERCHACO;
  else                                  defaultType = ((PetscObject) part)->type_name;
  if (!PetscPartitionerRegisterAllCalled) {ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);}

  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
  } else if (!((PetscObject) part)->type_name) {
    /* No option given and no type set yet: fall back to the default */
    ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
  }
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerSetFromOptions"
/*@
  PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to set options for

  Level: developer

.seealso: PetscPartitionerView()
@*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr = PetscPartitionerSetTypeFromOptions_Internal(part);CHKERRQ(ierr);

  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  if (part->ops->setfromoptions) {ierr = (*part->ops->setfromoptions)(part);CHKERRQ(ierr);}
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers((PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerSetUp"
/*@C
  PetscPartitionerSetUp - Construct data structures for the PetscPartitioner

  Collective on PetscPartitioner

  Input Parameter:
.
part - the PetscPartitioner object to setup 595 596 Level: developer 597 598 .seealso: PetscPartitionerView(), PetscPartitionerDestroy() 599 @*/ 600 PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part) 601 { 602 PetscErrorCode ierr; 603 604 PetscFunctionBegin; 605 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 606 if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);} 607 PetscFunctionReturn(0); 608 } 609 610 #undef __FUNCT__ 611 #define __FUNCT__ "PetscPartitionerDestroy" 612 /*@ 613 PetscPartitionerDestroy - Destroys a PetscPartitioner object 614 615 Collective on PetscPartitioner 616 617 Input Parameter: 618 . part - the PetscPartitioner object to destroy 619 620 Level: developer 621 622 .seealso: PetscPartitionerView() 623 @*/ 624 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part) 625 { 626 PetscErrorCode ierr; 627 628 PetscFunctionBegin; 629 if (!*part) PetscFunctionReturn(0); 630 PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1); 631 632 if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);} 633 ((PetscObject) (*part))->refct = 0; 634 635 if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);} 636 ierr = PetscHeaderDestroy(part);CHKERRQ(ierr); 637 PetscFunctionReturn(0); 638 } 639 640 #undef __FUNCT__ 641 #define __FUNCT__ "PetscPartitionerCreate" 642 /*@ 643 PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType(). 644 645 Collective on MPI_Comm 646 647 Input Parameter: 648 . comm - The communicator for the PetscPartitioner object 649 650 Output Parameter: 651 . 
part - The PetscPartitioner object 652 653 Level: beginner 654 655 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL 656 @*/ 657 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part) 658 { 659 PetscPartitioner p; 660 PetscErrorCode ierr; 661 662 PetscFunctionBegin; 663 PetscValidPointer(part, 2); 664 *part = NULL; 665 ierr = PetscFVInitializePackage();CHKERRQ(ierr); 666 667 ierr = PetscHeaderCreate(p, _p_PetscPartitioner, struct _PetscPartitionerOps, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr); 668 ierr = PetscMemzero(p->ops, sizeof(struct _PetscPartitionerOps));CHKERRQ(ierr); 669 670 *part = p; 671 PetscFunctionReturn(0); 672 } 673 674 #undef __FUNCT__ 675 #define __FUNCT__ "PetscPartitionerPartition" 676 /*@ 677 PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh 678 679 Collective on DM 680 681 Input Parameters: 682 + part - The PetscPartitioner 683 - dm - The mesh DM 684 685 Output Parameters: 686 + partSection - The PetscSection giving the division of points by partition 687 - partition - The list of points by partition 688 689 Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight() 690 691 Level: developer 692 693 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate() 694 @*/ 695 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition) 696 { 697 PetscMPIInt size; 698 PetscErrorCode ierr; 699 700 PetscFunctionBegin; 701 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 702 PetscValidHeaderSpecific(dm, DM_CLASSID, 2); 703 PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4); 704 PetscValidPointer(partition, 5); 705 ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), 
&size);CHKERRQ(ierr); 706 if (size == 1) { 707 PetscInt *points; 708 PetscInt cStart, cEnd, c; 709 710 ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr); 711 ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr); 712 ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr); 713 ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr); 714 ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr); 715 for (c = cStart; c < cEnd; ++c) points[c] = c; 716 ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr); 717 PetscFunctionReturn(0); 718 } 719 if (part->height == 0) { 720 PetscInt numVertices; 721 PetscInt *start = NULL; 722 PetscInt *adjacency = NULL; 723 724 ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency);CHKERRQ(ierr); 725 if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type"); 726 ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr); 727 ierr = PetscFree(start);CHKERRQ(ierr); 728 ierr = PetscFree(adjacency);CHKERRQ(ierr); 729 } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height); 730 PetscFunctionReturn(0); 731 } 732 733 #undef __FUNCT__ 734 #define __FUNCT__ "PetscPartitionerDestroy_Shell" 735 PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part) 736 { 737 PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data; 738 PetscErrorCode ierr; 739 740 PetscFunctionBegin; 741 ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr); 742 ierr = ISDestroy(&p->partition);CHKERRQ(ierr); 743 ierr = PetscFree(p);CHKERRQ(ierr); 744 PetscFunctionReturn(0); 745 } 746 747 #undef __FUNCT__ 748 #define __FUNCT__ "PetscPartitionerView_Shell_Ascii" 749 PetscErrorCode 
PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer) 750 { 751 PetscViewerFormat format; 752 PetscErrorCode ierr; 753 754 PetscFunctionBegin; 755 ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr); 756 ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr); 757 PetscFunctionReturn(0); 758 } 759 760 #undef __FUNCT__ 761 #define __FUNCT__ "PetscPartitionerView_Shell" 762 PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer) 763 { 764 PetscBool iascii; 765 PetscErrorCode ierr; 766 767 PetscFunctionBegin; 768 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 769 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2); 770 ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr); 771 if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);} 772 PetscFunctionReturn(0); 773 } 774 775 #undef __FUNCT__ 776 #define __FUNCT__ "PetscPartitionerPartition_Shell" 777 PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition) 778 { 779 PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data; 780 PetscInt np; 781 PetscErrorCode ierr; 782 783 PetscFunctionBegin; 784 ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr); 785 if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np); 786 ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr); 787 if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np); 788 ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr); 789 *partition = p->partition; 790 ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr); 791 
PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerInitialize_Shell"
/* Install the shell implementation's function table */
PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view      = PetscPartitionerView_Shell;
  part->ops->destroy   = PetscPartitionerDestroy_Shell;
  part->ops->partition = PetscPartitionerPartition_Shell;
  PetscFunctionReturn(0);
}

/*MC
  PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object

  Level: intermediate

.seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
M*/

#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerCreate_Shell"
PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
{
  PetscPartitioner_Shell *p;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
  part->data = p;

  ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerShellSetPartition"
/*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
  (the original doc comment described parameters 'dm' and 'enlarge', which do not exist in this signature)

  Collective on PetscPartitioner

  Input Parameters:
+ part     - The PetscPartitioner
. numProcs - The number of partitions
. sizes    - Array of length numProcs (or NULL) giving the number of points in each partition
- points   - Array (or NULL iff sizes is NULL) giving the points assigned to each partition, in partition order

  Level: developer

.seealso DMPlexDistribute(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt numProcs, const PetscInt sizes[], const PetscInt points[])
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscInt                proc, numPoints;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  if (sizes) {PetscValidPointer(sizes, 3);}
  /* points is required whenever sizes is given, hence the guard on sizes below */
  if (sizes) {PetscValidPointer(points, 4);}
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(p->section, 0, numProcs);CHKERRQ(ierr);
  if (sizes) {
    for (proc = 0; proc < numProcs; ++proc) {
      ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
    }
  }
  ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerDestroy_Chaco"
PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
{
  PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerView_Chaco_Ascii"
PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscViewerFormat format;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  ierr =
PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerView_Chaco"
/* Dispatch viewing of a Chaco partitioner: only ASCII viewers are handled, others are silently ignored */
PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
  if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

#if defined(PETSC_HAVE_CHACO)
#if defined(PETSC_HAVE_UNISTD_H)
#include <unistd.h>
#endif
/* Chaco does not have an include file, so declare its entry point by hand */
PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
                           float *ewgts, float *x, float *y, float *z, char *outassignname,
                           char *outfilename, short *assignment, int architecture, int ndims_tot,
                           int mesh_dims[3], double *goal, int global_method, int local_method,
                           int rqi_flag, int vmax, int ndims, double eigtol, long seed);

/* Chaco global: when nonzero, Chaco frees the graph arrays itself (we disable that below) */
extern int FREE_GRAPH;
#endif

#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerPartition_Chaco"
/*
  Partition the CSR graph (start/adjacency, numVertices local rows) into nparts parts
  using the Chaco library, writing the result as a PetscSection (points per part) plus
  an IS listing the vertices grouped by part.
*/
PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs = numVertices;           /* number of vertices in full graph */
  int           *vwgts = NULL;                  /* weights for all vertices */
  float         *ewgts = NULL;                  /* weights for all edges */
  float         *x = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname = NULL;          /* name of assignment output file */
  char          *outfilename = NULL;            /* output file name */
  int            architecture = 1;              /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot = 0;                 /* total number of cube dimensions to divide */
  int            mesh_dims[3];                  /* dimensions of mesh of processors */
  double        *goal = NULL;                   /* desired set sizes for each set */
  int            global_method = 1;             /* global partitioning algorithm */
  int            local_method = 1;              /* local partitioning algorithm */
  int            rqi_flag = 0;                  /* should I use RQI/Symmlq eigensolver? */
  int            vmax = 200;                    /* how many vertices to coarsen down to? */
  int            ndims = 1;                     /* number of eigenvectors (2^d sets) */
  double         eigtol = 0.001;                /* tolerance on eigenvectors */
  long           seed = 123636512;              /* for random graph mutations */
  short int     *assignment;                    /* Output partition: part number per vertex */
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  if (!numVertices) {
    /* Empty local graph: produce an empty partition without calling Chaco */
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0; /* Do not let Chaco free my memory */
  /* Chaco expects 1-based vertex numbering: shift adjacency in place (undone at the end) */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  /* The Chaco return code is captured in ierr and inspected below */
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    /* Restore stdout and recover whatever Chaco printed, for the error message */
    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#endif
  /* NOTE(review): without PETSC_HAVE_UNISTD_H the return value of interface() is never
     checked -- confirm whether a failure should be reported on such platforms */
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  /* Group vertex numbers by their assigned part (O(nparts*nvtxs) scan) */
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift applied above, restoring the caller's adjacency */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}

#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerInitialize_Chaco"
/* Install the function table for the Chaco partitioner implementation */
PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view      = PetscPartitionerView_Chaco;
  part->ops->destroy   = PetscPartitionerDestroy_Chaco;
  part->ops->partition = PetscPartitionerPartition_Chaco;
  PetscFunctionReturn(0);
}

/*MC
  PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library

  Level: intermediate

.seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
M*/

#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerCreate_Chaco"
/* Constructor for the Chaco partitioner: allocates the impl struct, installs ops, registers the citation */
PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
{
  PetscPartitioner_Chaco *p;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
  part->data = p;

  ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
  ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerDestroy_ParMetis"
/* Free the ParMetis implementation struct */
PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
{
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
  PetscErrorCode             ierr;

  PetscFunctionBegin;
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerView_ParMetis_Ascii"
/* ASCII viewer for the ParMetis partitioner */
PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscViewerFormat format;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  ierr =
PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr); 1087 ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr); 1088 PetscFunctionReturn(0); 1089 } 1090 1091 #undef __FUNCT__ 1092 #define __FUNCT__ "PetscPartitionerView_ParMetis" 1093 PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer) 1094 { 1095 PetscBool iascii; 1096 PetscErrorCode ierr; 1097 1098 PetscFunctionBegin; 1099 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 1100 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2); 1101 ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr); 1102 if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);} 1103 PetscFunctionReturn(0); 1104 } 1105 1106 #if defined(PETSC_HAVE_PARMETIS) 1107 #include <parmetis.h> 1108 #endif 1109 1110 #undef __FUNCT__ 1111 #define __FUNCT__ "PetscPartitionerPartition_ParMetis" 1112 PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition) 1113 { 1114 #if defined(PETSC_HAVE_PARMETIS) 1115 MPI_Comm comm; 1116 PetscInt nvtxs = numVertices; /* The number of vertices in full graph */ 1117 PetscInt *vtxdist; /* Distribution of vertices across processes */ 1118 PetscInt *xadj = start; /* Start of edge list for each vertex */ 1119 PetscInt *adjncy = adjacency; /* Edge lists for all vertices */ 1120 PetscInt *vwgt = NULL; /* Vertex weights */ 1121 PetscInt *adjwgt = NULL; /* Edge weights */ 1122 PetscInt wgtflag = 0; /* Indicates which weights are present */ 1123 PetscInt numflag = 0; /* Indicates initial offset (0 or 1) */ 1124 PetscInt ncon = 1; /* The number of weights per vertex */ 1125 PetscReal *tpwgts; /* The fraction of vertex weights assigned to each partition */ 1126 PetscReal *ubvec; /* The balance intolerance for vertex weights */ 1127 PetscInt 
options[5]; /* Options */ 1128 /* Outputs */ 1129 PetscInt edgeCut; /* The number of edges cut by the partition */ 1130 PetscInt *assignment, *points; 1131 PetscMPIInt rank, p, v, i; 1132 PetscErrorCode ierr; 1133 1134 PetscFunctionBegin; 1135 ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr); 1136 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 1137 options[0] = 0; /* Use all defaults */ 1138 /* Calculate vertex distribution */ 1139 ierr = PetscMalloc4(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment);CHKERRQ(ierr); 1140 vtxdist[0] = 0; 1141 ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr); 1142 for (p = 2; p <= nparts; ++p) { 1143 vtxdist[p] += vtxdist[p-1]; 1144 } 1145 /* Calculate weights */ 1146 for (p = 0; p < nparts; ++p) { 1147 tpwgts[p] = 1.0/nparts; 1148 } 1149 ubvec[0] = 1.05; 1150 1151 if (nparts == 1) { 1152 ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt)); 1153 } else { 1154 if (vtxdist[1] == vtxdist[nparts]) { 1155 if (!rank) { 1156 PetscStackPush("METIS_PartGraphKway"); 1157 ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment); 1158 PetscStackPop; 1159 if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()"); 1160 } 1161 } else { 1162 PetscStackPush("ParMETIS_V3_PartKway"); 1163 ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm); 1164 PetscStackPop; 1165 if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()"); 1166 } 1167 } 1168 /* Convert to PetscSection+IS */ 1169 ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr); 1170 for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);} 1171 ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr); 1172 ierr = 
PetscMalloc1(nvtxs, &points);CHKERRQ(ierr); 1173 for (p = 0, i = 0; p < nparts; ++p) { 1174 for (v = 0; v < nvtxs; ++v) { 1175 if (assignment[v] == p) points[i++] = v; 1176 } 1177 } 1178 if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs); 1179 ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr); 1180 ierr = PetscFree4(vtxdist,tpwgts,ubvec,assignment);CHKERRQ(ierr); 1181 #else 1182 SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis."); 1183 #endif 1184 PetscFunctionReturn(0); 1185 } 1186 1187 #undef __FUNCT__ 1188 #define __FUNCT__ "PetscPartitionerInitialize_ParMetis" 1189 PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part) 1190 { 1191 PetscFunctionBegin; 1192 part->ops->view = PetscPartitionerView_ParMetis; 1193 part->ops->destroy = PetscPartitionerDestroy_ParMetis; 1194 part->ops->partition = PetscPartitionerPartition_ParMetis; 1195 PetscFunctionReturn(0); 1196 } 1197 1198 /*MC 1199 PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library 1200 1201 Level: intermediate 1202 1203 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType() 1204 M*/ 1205 1206 #undef __FUNCT__ 1207 #define __FUNCT__ "PetscPartitionerCreate_ParMetis" 1208 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part) 1209 { 1210 PetscPartitioner_ParMetis *p; 1211 PetscErrorCode ierr; 1212 1213 PetscFunctionBegin; 1214 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 1215 ierr = PetscNewLog(part, &p);CHKERRQ(ierr); 1216 part->data = p; 1217 1218 ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr); 1219 ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr); 1220 PetscFunctionReturn(0); 1221 } 1222 1223 #undef __FUNCT__ 1224 #define __FUNCT__ 
"DMPlexGetPartitioner" 1225 /*@ 1226 DMPlexGetPartitioner - Get the mesh partitioner 1227 1228 Not collective 1229 1230 Input Parameter: 1231 . dm - The DM 1232 1233 Output Parameter: 1234 . part - The PetscPartitioner 1235 1236 Level: developer 1237 1238 .seealso DMPlexDistribute(), PetscPartitionerCreate() 1239 @*/ 1240 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part) 1241 { 1242 DM_Plex *mesh = (DM_Plex *) dm->data; 1243 1244 PetscFunctionBegin; 1245 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1246 PetscValidPointer(part, 2); 1247 *part = mesh->partitioner; 1248 PetscFunctionReturn(0); 1249 } 1250 1251 #undef __FUNCT__ 1252 #define __FUNCT__ "DMPlexMarkTreeClosure" 1253 static PetscErrorCode DMPlexMarkTreeClosure(DM dm, PetscSegBuffer segpart, PetscBT bt, PetscInt point, PetscInt *partSize) 1254 { 1255 PetscInt parent, closureSize, *closure = NULL, i, pStart, pEnd; 1256 PetscErrorCode ierr; 1257 1258 PetscFunctionBegin; 1259 ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr); 1260 if (parent == point) PetscFunctionReturn(0); 1261 ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr); 1262 ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr); 1263 for (i = 0; i < closureSize; i++) { 1264 PetscInt cpoint = closure[2*i]; 1265 1266 if (!PetscBTLookupSet(bt,cpoint-pStart)) { 1267 PetscInt *PETSC_RESTRICT pt; 1268 (*partSize)++; 1269 ierr = PetscSegBufferGetInts(segpart,1,&pt);CHKERRQ(ierr); 1270 *pt = cpoint; 1271 } 1272 ierr = DMPlexMarkTreeClosure(dm,segpart,bt,cpoint,partSize);CHKERRQ(ierr); 1273 } 1274 ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr); 1275 PetscFunctionReturn(0); 1276 } 1277 1278 #undef __FUNCT__ 1279 #define __FUNCT__ "DMPlexCreatePartitionClosure" 1280 PetscErrorCode DMPlexCreatePartitionClosure(DM dm, PetscSection pointSection, IS pointPartition, PetscSection *section, IS *partition) 1281 { 1282 /* const PetscInt height = 0; */ 
  const PetscInt *partArray;
  PetscInt       *allPoints, *packPoints;
  PetscInt        rStart, rEnd, rank, pStart, pEnd, newSize;
  PetscErrorCode  ierr;
  PetscBT         bt;       /* marks points already added for the current rank */
  PetscSegBuffer  segpack,segpart;

  PetscFunctionBegin;
  ierr = PetscSectionGetChart(pointSection, &rStart, &rEnd);CHKERRQ(ierr);
  ierr = ISGetIndices(pointPartition, &partArray);CHKERRQ(ierr);
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dm), section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(*section, rStart, rEnd);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr);
  ierr = PetscBTCreate(pEnd-pStart,&bt);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&segpack);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&segpart);CHKERRQ(ierr);
  /* For each target rank, collect the closure of all its points (deduplicated via bt) */
  for (rank = rStart; rank < rEnd; ++rank) {
    PetscInt partSize = 0, numPoints, offset, p, *PETSC_RESTRICT placePoints;

    ierr = PetscSectionGetDof(pointSection, rank, &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(pointSection, rank, &offset);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      PetscInt point = partArray[offset+p], closureSize, c;
      PetscInt *closure = NULL;

      /* TODO Include support for height > 0 case */
      ierr = DMPlexGetTransitiveClosure(dm, point, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      for (c=0; c<closureSize; c++) {
        PetscInt cpoint = closure[c*2]; /* closure stores (point, orientation) pairs */
        if (!PetscBTLookupSet(bt,cpoint-pStart)) {
          PetscInt *PETSC_RESTRICT pt;
          partSize++;
          ierr = PetscSegBufferGetInts(segpart,1,&pt);CHKERRQ(ierr);
          *pt = cpoint;
        }
        /* Also pull in tree-parent closures for non-conforming meshes */
        ierr = DMPlexMarkTreeClosure(dm,segpart,bt,cpoint,&partSize);CHKERRQ(ierr);
      }
      ierr = DMPlexRestoreTransitiveClosure(dm, point, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
    }
    ierr = PetscSectionSetDof(*section, rank, partSize);CHKERRQ(ierr);
    /* Move this rank's points from segpart into segpack, sorted */
    ierr = PetscSegBufferGetInts(segpack,partSize,&placePoints);CHKERRQ(ierr);
    ierr = PetscSegBufferExtractTo(segpart,placePoints);CHKERRQ(ierr);
    ierr = PetscSortInt(partSize,placePoints);CHKERRQ(ierr);
    /* Reset the bit map so the next rank starts with a clean slate */
    for (p=0; p<partSize; p++) {ierr = PetscBTClear(bt,placePoints[p]-pStart);CHKERRQ(ierr);}
  }
  ierr = PetscBTDestroy(&bt);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&segpart);CHKERRQ(ierr);

  ierr = PetscSectionSetUp(*section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(*section, &newSize);CHKERRQ(ierr);
  ierr = PetscMalloc1(newSize, &allPoints);CHKERRQ(ierr);

  /* Copy the packed points into the final array, rank by rank, at section offsets */
  ierr = PetscSegBufferExtractInPlace(segpack,&packPoints);CHKERRQ(ierr);
  for (rank = rStart; rank < rEnd; ++rank) {
    PetscInt numPoints, offset;

    ierr = PetscSectionGetDof(*section, rank, &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(*section, rank, &offset);CHKERRQ(ierr);
    ierr = PetscMemcpy(&allPoints[offset], packPoints, numPoints * sizeof(PetscInt));CHKERRQ(ierr);
    packPoints += numPoints;
  }

  ierr = PetscSegBufferDestroy(&segpack);CHKERRQ(ierr);
  ierr = ISRestoreIndices(pointPartition, &partArray);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), newSize, allPoints, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelClosure"
/*@
  DMPlexPartitionLabelClosure - Add the closure of all points to the partition label

  Input Parameters:
+ dm - The DM
- label - DMLabel assigning ranks to remote roots

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
{
  IS              rankIS, pointIS;
  const PetscInt *ranks, *points;
  PetscInt        numRanks, numPoints, r, p, c, closureSize;
  PetscInt       *closure = NULL;
PetscErrorCode ierr; 1371 1372 PetscFunctionBegin; 1373 ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr); 1374 ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr); 1375 ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr); 1376 for (r = 0; r < numRanks; ++r) { 1377 const PetscInt rank = ranks[r]; 1378 1379 ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr); 1380 ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr); 1381 ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr); 1382 for (p = 0; p < numPoints; ++p) { 1383 ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr); 1384 for (c = 0; c < closureSize*2; c += 2) {ierr = DMLabelSetValue(label, closure[c], rank);CHKERRQ(ierr);} 1385 } 1386 ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr); 1387 ierr = ISDestroy(&pointIS);CHKERRQ(ierr); 1388 } 1389 if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);} 1390 ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr); 1391 ierr = ISDestroy(&rankIS);CHKERRQ(ierr); 1392 PetscFunctionReturn(0); 1393 } 1394 1395 #undef __FUNCT__ 1396 #define __FUNCT__ "DMPlexPartitionLabelAdjacency" 1397 /*@ 1398 DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label 1399 1400 Input Parameters: 1401 + dm - The DM 1402 - label - DMLabel assinging ranks to remote roots 1403 1404 Level: developer 1405 1406 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap 1407 @*/ 1408 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label) 1409 { 1410 IS rankIS, pointIS; 1411 const PetscInt *ranks, *points; 1412 PetscInt numRanks, numPoints, r, p, a, adjSize; 1413 PetscInt *adj = NULL; 1414 PetscErrorCode ierr; 1415 1416 PetscFunctionBegin; 1417 ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr); 1418 ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr); 1419 ierr = ISGetIndices(rankIS, 
&ranks);CHKERRQ(ierr); 1420 for (r = 0; r < numRanks; ++r) { 1421 const PetscInt rank = ranks[r]; 1422 1423 ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr); 1424 ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr); 1425 ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr); 1426 for (p = 0; p < numPoints; ++p) { 1427 adjSize = PETSC_DETERMINE; 1428 ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr); 1429 for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);} 1430 } 1431 ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr); 1432 ierr = ISDestroy(&pointIS);CHKERRQ(ierr); 1433 } 1434 ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr); 1435 ierr = ISDestroy(&rankIS);CHKERRQ(ierr); 1436 ierr = PetscFree(adj);CHKERRQ(ierr); 1437 PetscFunctionReturn(0); 1438 } 1439 1440 #undef __FUNCT__ 1441 #define __FUNCT__ "DMPlexPartitionLabelInvert" 1442 /*@ 1443 DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label 1444 1445 Input Parameters: 1446 + dm - The DM 1447 . rootLabel - DMLabel assinging ranks to local roots 1448 . processSF - A star forest mapping into the local index on each remote rank 1449 1450 Output Parameter: 1451 - leafLabel - DMLabel assinging ranks to remote roots 1452 1453 Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The 1454 resulting leafLabel is a receiver mapping of remote roots to their parent rank. 

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, numProcs;
  PetscInt           p, n, numNeighbors, size, l, nleaves;
  PetscSF            sfPoint;
  PetscSFNode       *rootPoints, *leafPoints;
  PetscSection       rootSection, leafSection;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, numProcs);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  /* Size the send section: one dof per labeled point, grouped by target rank */
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(size, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* If the point is a leaf of the point SF, forward its true owner; otherwise we own it */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap */
  /* NOTE(review): MPIU_2INT is used as the datatype for PetscSFNode entries; this assumes
     PetscSFNode is layout-compatible with a pair of PetscInt -- confirm for 64-bit-index builds */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Filter remote contributions (ovLeafPoints) into the overlapSF */
  ierr = PetscSectionGetStorageSize(leafSection, &size);CHKERRQ(ierr);
  for (p = 0; p < size; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelCreateSF"
/*@
  DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points

  Input Parameters:
+ dm - The DM
. label - DMLabel assigning ranks to remote roots

  Output Parameter:
- sf - The star forest communication context encapsulating the defined mapping

  Note: The incoming label is a receiver mapping of remote points to their parent rank.

  Level: developer

.seealso: DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
{
  PetscMPIInt    numProcs;
  PetscInt       n, idx, numRemote, p, numPoints, pStart, pEnd;
  PetscSFNode   *remotePoints;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);

  /* First pass: count labeled points over all rank strata to size the leaf array */
  for (numRemote = 0, n = 0; n < numProcs; ++n) {
    ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
    numRemote += numPoints;
  }
  ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
  /* Second pass: fill (index, rank) pairs, one per labeled point */
  for (idx = 0, n = 0; n < numProcs; ++n) {
    IS              remoteRootIS;
    const PetscInt *remoteRoots;
    ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
    if (numPoints <= 0) continue;
    ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
    ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    for (p = 0; p < numPoints; p++) {
      remotePoints[idx].index = remoteRoots[p];
      remotePoints[idx].rank  = n;
      idx++;
    }
    ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
  }
  ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  /* NULL ilocal means leaves are identified with the first numRemote local points;
     remotePoints ownership passes to the SF */
  ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}