1 #include <petsc-private/dmpleximpl.h> /*I "petscdmplex.h" I*/ 2 3 PetscClassId PETSCPARTITIONER_CLASSID = 0; 4 5 PetscFunctionList PetscPartitionerList = NULL; 6 PetscBool PetscPartitionerRegisterAllCalled = PETSC_FALSE; 7 8 PetscBool ChacoPartitionercite = PETSC_FALSE; 9 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n" 10 " author = {Bruce Hendrickson and Robert Leland},\n" 11 " title = {A multilevel algorithm for partitioning graphs},\n" 12 " booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)}," 13 " isbn = {0-89791-816-9},\n" 14 " pages = {28},\n" 15 " doi = {http://doi.acm.org/10.1145/224170.224228},\n" 16 " publisher = {ACM Press},\n" 17 " address = {New York},\n" 18 " year = {1995}\n}\n"; 19 20 PetscBool ParMetisPartitionercite = PETSC_FALSE; 21 const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n" 22 " author = {George Karypis and Vipin Kumar},\n" 23 " title = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n" 24 " journal = {Journal of Parallel and Distributed Computing},\n" 25 " volume = {48},\n" 26 " pages = {71--85},\n" 27 " year = {1998}\n}\n"; 28 29 #undef __FUNCT__ 30 #define __FUNCT__ "DMPlexCreateNeighborCSR" 31 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency) 32 { 33 const PetscInt maxFaceCases = 30; 34 PetscInt numFaceCases = 0; 35 PetscInt numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */ 36 PetscInt *off, *adj; 37 PetscInt *neighborCells = NULL; 38 PetscInt dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell; 39 PetscErrorCode ierr; 40 41 PetscFunctionBegin; 42 /* For parallel partitioning, I think you have to communicate supports */ 43 ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr); 44 cellDim = dim - cellHeight; 45 ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr); 46 ierr = DMPlexGetHeightStratum(dm, 
cellHeight, &cStart, &cEnd);CHKERRQ(ierr); 47 if (cEnd - cStart == 0) { 48 if (numVertices) *numVertices = 0; 49 if (offsets) *offsets = NULL; 50 if (adjacency) *adjacency = NULL; 51 PetscFunctionReturn(0); 52 } 53 numCells = cEnd - cStart; 54 faceDepth = depth - cellHeight; 55 if (dim == depth) { 56 PetscInt f, fStart, fEnd; 57 58 ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr); 59 /* Count neighboring cells */ 60 ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr); 61 for (f = fStart; f < fEnd; ++f) { 62 const PetscInt *support; 63 PetscInt supportSize; 64 ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr); 65 ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr); 66 if (supportSize == 2) { 67 PetscInt numChildren; 68 69 ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr); 70 if (!numChildren) { 71 ++off[support[0]-cStart+1]; 72 ++off[support[1]-cStart+1]; 73 } 74 } 75 } 76 /* Prefix sum */ 77 for (c = 1; c <= numCells; ++c) off[c] += off[c-1]; 78 if (adjacency) { 79 PetscInt *tmp; 80 81 ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr); 82 ierr = PetscMalloc1((numCells+1), &tmp);CHKERRQ(ierr); 83 ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr); 84 /* Get neighboring cells */ 85 for (f = fStart; f < fEnd; ++f) { 86 const PetscInt *support; 87 PetscInt supportSize; 88 ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr); 89 ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr); 90 if (supportSize == 2) { 91 PetscInt numChildren; 92 93 ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr); 94 if (!numChildren) { 95 adj[tmp[support[0]-cStart]++] = support[1]; 96 adj[tmp[support[1]-cStart]++] = support[0]; 97 } 98 } 99 } 100 #if defined(PETSC_USE_DEBUG) 101 for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart); 102 #endif 103 ierr = 
PetscFree(tmp);CHKERRQ(ierr); 104 } 105 if (numVertices) *numVertices = numCells; 106 if (offsets) *offsets = off; 107 if (adjacency) *adjacency = adj; 108 PetscFunctionReturn(0); 109 } 110 /* Setup face recognition */ 111 if (faceDepth == 1) { 112 PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */ 113 114 for (c = cStart; c < cEnd; ++c) { 115 PetscInt corners; 116 117 ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr); 118 if (!cornersSeen[corners]) { 119 PetscInt nFV; 120 121 if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases"); 122 cornersSeen[corners] = 1; 123 124 ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr); 125 126 numFaceVertices[numFaceCases++] = nFV; 127 } 128 } 129 } 130 ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr); 131 /* Count neighboring cells */ 132 for (cell = cStart; cell < cEnd; ++cell) { 133 PetscInt numNeighbors = PETSC_DETERMINE, n; 134 135 ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr); 136 /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */ 137 for (n = 0; n < numNeighbors; ++n) { 138 PetscInt cellPair[2]; 139 PetscBool found = faceDepth > 1 ? 
PETSC_TRUE : PETSC_FALSE; 140 PetscInt meetSize = 0; 141 const PetscInt *meet = NULL; 142 143 cellPair[0] = cell; cellPair[1] = neighborCells[n]; 144 if (cellPair[0] == cellPair[1]) continue; 145 if (!found) { 146 ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr); 147 if (meetSize) { 148 PetscInt f; 149 150 for (f = 0; f < numFaceCases; ++f) { 151 if (numFaceVertices[f] == meetSize) { 152 found = PETSC_TRUE; 153 break; 154 } 155 } 156 } 157 ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr); 158 } 159 if (found) ++off[cell-cStart+1]; 160 } 161 } 162 /* Prefix sum */ 163 for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1]; 164 165 if (adjacency) { 166 ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr); 167 /* Get neighboring cells */ 168 for (cell = cStart; cell < cEnd; ++cell) { 169 PetscInt numNeighbors = PETSC_DETERMINE, n; 170 PetscInt cellOffset = 0; 171 172 ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr); 173 /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */ 174 for (n = 0; n < numNeighbors; ++n) { 175 PetscInt cellPair[2]; 176 PetscBool found = faceDepth > 1 ? 
PETSC_TRUE : PETSC_FALSE; 177 PetscInt meetSize = 0; 178 const PetscInt *meet = NULL; 179 180 cellPair[0] = cell; cellPair[1] = neighborCells[n]; 181 if (cellPair[0] == cellPair[1]) continue; 182 if (!found) { 183 ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr); 184 if (meetSize) { 185 PetscInt f; 186 187 for (f = 0; f < numFaceCases; ++f) { 188 if (numFaceVertices[f] == meetSize) { 189 found = PETSC_TRUE; 190 break; 191 } 192 } 193 } 194 ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr); 195 } 196 if (found) { 197 adj[off[cell-cStart]+cellOffset] = neighborCells[n]; 198 ++cellOffset; 199 } 200 } 201 } 202 } 203 ierr = PetscFree(neighborCells);CHKERRQ(ierr); 204 if (numVertices) *numVertices = numCells; 205 if (offsets) *offsets = off; 206 if (adjacency) *adjacency = adj; 207 PetscFunctionReturn(0); 208 } 209 210 #undef __FUNCT__ 211 #define __FUNCT__ "DMPlexEnlargePartition" 212 /* Expand the partition by BFS on the adjacency graph */ 213 PetscErrorCode DMPlexEnlargePartition(DM dm, const PetscInt start[], const PetscInt adjacency[], PetscSection origPartSection, IS origPartition, PetscSection partSection, IS *partition) 214 { 215 PetscHashI h; 216 const PetscInt *points; 217 PetscInt **tmpPoints, *newPoints, totPoints = 0; 218 PetscInt pStart, pEnd, part, q; 219 PetscBool useCone; 220 PetscErrorCode ierr; 221 222 PetscFunctionBegin; 223 PetscHashICreate(h); 224 ierr = PetscSectionGetChart(origPartSection, &pStart, &pEnd);CHKERRQ(ierr); 225 ierr = PetscSectionSetChart(partSection, pStart, pEnd);CHKERRQ(ierr); 226 ierr = ISGetIndices(origPartition, &points);CHKERRQ(ierr); 227 ierr = PetscMalloc1((pEnd - pStart), &tmpPoints);CHKERRQ(ierr); 228 ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr); 229 ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr); 230 for (part = pStart; part < pEnd; ++part) { 231 PetscInt *adj = NULL; 232 PetscInt numPoints, nP, numNewPoints, off, p, n = 0; 233 234 
PetscHashIClear(h); 235 ierr = PetscSectionGetDof(origPartSection, part, &numPoints);CHKERRQ(ierr); 236 ierr = PetscSectionGetOffset(origPartSection, part, &off);CHKERRQ(ierr); 237 /* Add all existing points to h */ 238 for (p = 0; p < numPoints; ++p) { 239 const PetscInt point = points[off+p]; 240 PetscHashIAdd(h, point, 1); 241 } 242 PetscHashISize(h, nP); 243 if (nP != numPoints) SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONG, "Invalid partition has %d points, but only %d were unique", numPoints, nP); 244 /* Add all points in next BFS level */ 245 for (p = 0; p < numPoints; ++p) { 246 const PetscInt point = points[off+p]; 247 PetscInt adjSize = PETSC_DETERMINE, a; 248 249 ierr = DMPlexGetAdjacency(dm, point, &adjSize, &adj);CHKERRQ(ierr); 250 for (a = 0; a < adjSize; ++a) PetscHashIAdd(h, adj[a], 1); 251 } 252 PetscHashISize(h, numNewPoints); 253 ierr = PetscSectionSetDof(partSection, part, numNewPoints);CHKERRQ(ierr); 254 ierr = PetscMalloc1(numNewPoints, &tmpPoints[part]);CHKERRQ(ierr); 255 ierr = PetscHashIGetKeys(h, &n, tmpPoints[part]);CHKERRQ(ierr); 256 ierr = PetscFree(adj);CHKERRQ(ierr); 257 totPoints += numNewPoints; 258 } 259 ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr); 260 ierr = ISRestoreIndices(origPartition, &points);CHKERRQ(ierr); 261 PetscHashIDestroy(h); 262 ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr); 263 ierr = PetscMalloc1(totPoints, &newPoints);CHKERRQ(ierr); 264 for (part = pStart, q = 0; part < pEnd; ++part) { 265 PetscInt numPoints, p; 266 267 ierr = PetscSectionGetDof(partSection, part, &numPoints);CHKERRQ(ierr); 268 for (p = 0; p < numPoints; ++p, ++q) newPoints[q] = tmpPoints[part][p]; 269 ierr = PetscFree(tmpPoints[part]);CHKERRQ(ierr); 270 } 271 ierr = PetscFree(tmpPoints);CHKERRQ(ierr); 272 ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), totPoints, newPoints, PETSC_OWN_POINTER, partition);CHKERRQ(ierr); 273 PetscFunctionReturn(0); 274 } 275 276 #undef __FUNCT__ 277 #define __FUNCT__ 
"PetscPartitionerRegister" 278 /*@C 279 PetscPartitionerRegister - Adds a new PetscPartitioner implementation 280 281 Not Collective 282 283 Input Parameters: 284 + name - The name of a new user-defined creation routine 285 - create_func - The creation routine itself 286 287 Notes: 288 PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners 289 290 Sample usage: 291 .vb 292 PetscPartitionerRegister("my_part", MyPetscPartitionerCreate); 293 .ve 294 295 Then, your PetscPartitioner type can be chosen with the procedural interface via 296 .vb 297 PetscPartitionerCreate(MPI_Comm, PetscPartitioner *); 298 PetscPartitionerSetType(PetscPartitioner, "my_part"); 299 .ve 300 or at runtime via the option 301 .vb 302 -petscpartitioner_type my_part 303 .ve 304 305 Level: advanced 306 307 .keywords: PetscPartitioner, register 308 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy() 309 310 @*/ 311 PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner)) 312 { 313 PetscErrorCode ierr; 314 315 PetscFunctionBegin; 316 ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr); 317 PetscFunctionReturn(0); 318 } 319 320 #undef __FUNCT__ 321 #define __FUNCT__ "PetscPartitionerSetType" 322 /*@C 323 PetscPartitionerSetType - Builds a particular PetscPartitioner 324 325 Collective on PetscPartitioner 326 327 Input Parameters: 328 + part - The PetscPartitioner object 329 - name - The kind of partitioner 330 331 Options Database Key: 332 . 
-petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types 333 334 Level: intermediate 335 336 .keywords: PetscPartitioner, set, type 337 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate() 338 @*/ 339 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name) 340 { 341 PetscErrorCode (*r)(PetscPartitioner); 342 PetscBool match; 343 PetscErrorCode ierr; 344 345 PetscFunctionBegin; 346 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 347 ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr); 348 if (match) PetscFunctionReturn(0); 349 350 if (!PetscPartitionerRegisterAllCalled) {ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);} 351 ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr); 352 if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name); 353 354 if (part->ops->destroy) { 355 ierr = (*part->ops->destroy)(part);CHKERRQ(ierr); 356 part->ops->destroy = NULL; 357 } 358 ierr = (*r)(part);CHKERRQ(ierr); 359 ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr); 360 PetscFunctionReturn(0); 361 } 362 363 #undef __FUNCT__ 364 #define __FUNCT__ "PetscPartitionerGetType" 365 /*@C 366 PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object. 367 368 Not Collective 369 370 Input Parameter: 371 . part - The PetscPartitioner 372 373 Output Parameter: 374 . 
name - The PetscPartitioner type name 375 376 Level: intermediate 377 378 .keywords: PetscPartitioner, get, type, name 379 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate() 380 @*/ 381 PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name) 382 { 383 PetscErrorCode ierr; 384 385 PetscFunctionBegin; 386 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 387 PetscValidPointer(name, 2); 388 if (!PetscPartitionerRegisterAllCalled) {ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);} 389 *name = ((PetscObject) part)->type_name; 390 PetscFunctionReturn(0); 391 } 392 393 #undef __FUNCT__ 394 #define __FUNCT__ "PetscPartitionerView" 395 /*@C 396 PetscPartitionerView - Views a PetscPartitioner 397 398 Collective on PetscPartitioner 399 400 Input Parameter: 401 + part - the PetscPartitioner object to view 402 - v - the viewer 403 404 Level: developer 405 406 .seealso: PetscPartitionerDestroy() 407 @*/ 408 PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v) 409 { 410 PetscErrorCode ierr; 411 412 PetscFunctionBegin; 413 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 414 if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);} 415 if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);} 416 PetscFunctionReturn(0); 417 } 418 419 #undef __FUNCT__ 420 #define __FUNCT__ "PetscPartitionerViewFromOptions" 421 /* 422 PetscPartitionerViewFromOptions - Processes command line options to determine if/how a PetscPartitioner is to be viewed. 423 424 Collective on PetscPartitioner 425 426 Input Parameters: 427 + part - the PetscPartitioner 428 . 
prefix - prefix to use for viewing, or NULL 429 - optionname - option to activate viewing 430 431 Level: intermediate 432 433 .keywords: PetscPartitioner, view, options, database 434 .seealso: VecViewFromOptions(), MatViewFromOptions() 435 */ 436 PetscErrorCode PetscPartitionerViewFromOptions(PetscPartitioner part, const char prefix[], const char optionname[]) 437 { 438 PetscViewer viewer; 439 PetscViewerFormat format; 440 PetscBool flg; 441 PetscErrorCode ierr; 442 443 PetscFunctionBegin; 444 if (prefix) {ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject) part), prefix, optionname, &viewer, &format, &flg);CHKERRQ(ierr);} 445 else {ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject) part), ((PetscObject) part)->prefix, optionname, &viewer, &format, &flg);CHKERRQ(ierr);} 446 if (flg) { 447 ierr = PetscViewerPushFormat(viewer, format);CHKERRQ(ierr); 448 ierr = PetscPartitionerView(part, viewer);CHKERRQ(ierr); 449 ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr); 450 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 451 } 452 PetscFunctionReturn(0); 453 } 454 #undef __FUNCT__ 455 #define __FUNCT__ "PetscPartitionerSetTypeFromOptions_Internal" 456 PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part) 457 { 458 const char *defaultType; 459 char name[256]; 460 PetscBool flg; 461 PetscErrorCode ierr; 462 463 PetscFunctionBegin; 464 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 465 if (!((PetscObject) part)->type_name) defaultType = PETSCPARTITIONERCHACO; 466 else defaultType = ((PetscObject) part)->type_name; 467 if (!PetscPartitionerRegisterAllCalled) {ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);} 468 469 ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr); 470 ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);CHKERRQ(ierr); 471 if (flg) { 472 ierr = PetscPartitionerSetType(part, 
name);CHKERRQ(ierr); 473 } else if (!((PetscObject) part)->type_name) { 474 ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr); 475 } 476 ierr = PetscOptionsEnd();CHKERRQ(ierr); 477 PetscFunctionReturn(0); 478 } 479 480 #undef __FUNCT__ 481 #define __FUNCT__ "PetscPartitionerSetFromOptions" 482 /*@ 483 PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database 484 485 Collective on PetscPartitioner 486 487 Input Parameter: 488 . part - the PetscPartitioner object to set options for 489 490 Level: developer 491 492 .seealso: PetscPartitionerView() 493 @*/ 494 PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part) 495 { 496 PetscErrorCode ierr; 497 498 PetscFunctionBegin; 499 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 500 ierr = PetscPartitionerSetTypeFromOptions_Internal(part);CHKERRQ(ierr); 501 502 ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr); 503 if (part->ops->setfromoptions) {ierr = (*part->ops->setfromoptions)(part);CHKERRQ(ierr);} 504 /* process any options handlers added with PetscObjectAddOptionsHandler() */ 505 ierr = PetscObjectProcessOptionsHandlers((PetscObject) part);CHKERRQ(ierr); 506 ierr = PetscOptionsEnd();CHKERRQ(ierr); 507 ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr); 508 PetscFunctionReturn(0); 509 } 510 511 #undef __FUNCT__ 512 #define __FUNCT__ "PetscPartitionerSetUp" 513 /*@C 514 PetscPartitionerSetUp - Construct data structures for the PetscPartitioner 515 516 Collective on PetscPartitioner 517 518 Input Parameter: 519 . 
part - the PetscPartitioner object to setup 520 521 Level: developer 522 523 .seealso: PetscPartitionerView(), PetscPartitionerDestroy() 524 @*/ 525 PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part) 526 { 527 PetscErrorCode ierr; 528 529 PetscFunctionBegin; 530 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 531 if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);} 532 PetscFunctionReturn(0); 533 } 534 535 #undef __FUNCT__ 536 #define __FUNCT__ "PetscPartitionerDestroy" 537 /*@ 538 PetscPartitionerDestroy - Destroys a PetscPartitioner object 539 540 Collective on PetscPartitioner 541 542 Input Parameter: 543 . part - the PetscPartitioner object to destroy 544 545 Level: developer 546 547 .seealso: PetscPartitionerView() 548 @*/ 549 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part) 550 { 551 PetscErrorCode ierr; 552 553 PetscFunctionBegin; 554 if (!*part) PetscFunctionReturn(0); 555 PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1); 556 557 if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);} 558 ((PetscObject) (*part))->refct = 0; 559 560 if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);} 561 ierr = PetscHeaderDestroy(part);CHKERRQ(ierr); 562 PetscFunctionReturn(0); 563 } 564 565 #undef __FUNCT__ 566 #define __FUNCT__ "PetscPartitionerCreate" 567 /*@ 568 PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType(). 569 570 Collective on MPI_Comm 571 572 Input Parameter: 573 . comm - The communicator for the PetscPartitioner object 574 575 Output Parameter: 576 . 
part - The PetscPartitioner object 577 578 Level: beginner 579 580 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL 581 @*/ 582 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part) 583 { 584 PetscPartitioner p; 585 PetscErrorCode ierr; 586 587 PetscFunctionBegin; 588 PetscValidPointer(part, 2); 589 *part = NULL; 590 ierr = PetscFVInitializePackage();CHKERRQ(ierr); 591 592 ierr = PetscHeaderCreate(p, _p_PetscPartitioner, struct _PetscPartitionerOps, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr); 593 ierr = PetscMemzero(p->ops, sizeof(struct _PetscPartitionerOps));CHKERRQ(ierr); 594 595 *part = p; 596 PetscFunctionReturn(0); 597 } 598 599 #undef __FUNCT__ 600 #define __FUNCT__ "PetscPartitionerPartition" 601 /*@ 602 PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh 603 604 Collective on DM 605 606 Input Parameters: 607 + part - The PetscPartitioner 608 - dm - The mesh DM 609 610 Output Parameters: 611 + partSection - The PetscSection giving the division of points by partition 612 - partition - The list of points by partition 613 614 Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight() 615 616 Level: developer 617 618 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate() 619 @*/ 620 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition) 621 { 622 PetscMPIInt size; 623 PetscErrorCode ierr; 624 625 PetscFunctionBegin; 626 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 627 PetscValidHeaderSpecific(dm, DM_CLASSID, 2); 628 PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4); 629 PetscValidPointer(partition, 5); 630 ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), 
&size);CHKERRQ(ierr); 631 if (size == 1) { 632 PetscInt *points; 633 PetscInt cStart, cEnd, c; 634 635 ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr); 636 ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr); 637 ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr); 638 ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr); 639 ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr); 640 for (c = cStart; c < cEnd; ++c) points[c] = c; 641 ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr); 642 PetscFunctionReturn(0); 643 } 644 if (part->height == 0) { 645 PetscInt numVertices; 646 PetscInt *start = NULL; 647 PetscInt *adjacency = NULL; 648 649 ierr = DMPlexCreateNeighborCSR(dm, 0, &numVertices, &start, &adjacency);CHKERRQ(ierr); 650 if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type"); 651 ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr); 652 ierr = PetscFree(start);CHKERRQ(ierr); 653 ierr = PetscFree(adjacency);CHKERRQ(ierr); 654 } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height); 655 PetscFunctionReturn(0); 656 } 657 658 #undef __FUNCT__ 659 #define __FUNCT__ "PetscPartitionerDestroy_Shell" 660 PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part) 661 { 662 PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data; 663 PetscErrorCode ierr; 664 665 PetscFunctionBegin; 666 ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr); 667 ierr = ISDestroy(&p->partition);CHKERRQ(ierr); 668 ierr = PetscFree(p);CHKERRQ(ierr); 669 PetscFunctionReturn(0); 670 } 671 672 #undef __FUNCT__ 673 #define __FUNCT__ "PetscPartitionerView_Shell_Ascii" 674 PetscErrorCode 
PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer) 675 { 676 PetscViewerFormat format; 677 PetscErrorCode ierr; 678 679 PetscFunctionBegin; 680 ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr); 681 ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr); 682 PetscFunctionReturn(0); 683 } 684 685 #undef __FUNCT__ 686 #define __FUNCT__ "PetscPartitionerView_Shell" 687 PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer) 688 { 689 PetscBool iascii; 690 PetscErrorCode ierr; 691 692 PetscFunctionBegin; 693 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 694 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2); 695 ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr); 696 if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);} 697 PetscFunctionReturn(0); 698 } 699 700 #undef __FUNCT__ 701 #define __FUNCT__ "PetscPartitionerPartition_Shell" 702 PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition) 703 { 704 PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data; 705 PetscInt np; 706 PetscErrorCode ierr; 707 708 PetscFunctionBegin; 709 ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr); 710 if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np); 711 ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr); 712 if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np); 713 ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr); 714 *partition = p->partition; 715 ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr); 716 
PetscFunctionReturn(0); 717 } 718 719 #undef __FUNCT__ 720 #define __FUNCT__ "PetscPartitionerInitialize_Shell" 721 PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part) 722 { 723 PetscFunctionBegin; 724 part->ops->view = PetscPartitionerView_Shell; 725 part->ops->destroy = PetscPartitionerDestroy_Shell; 726 part->ops->partition = PetscPartitionerPartition_Shell; 727 PetscFunctionReturn(0); 728 } 729 730 /*MC 731 PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object 732 733 Level: intermediate 734 735 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType() 736 M*/ 737 738 #undef __FUNCT__ 739 #define __FUNCT__ "PetscPartitionerCreate_Shell" 740 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part) 741 { 742 PetscPartitioner_Shell *p; 743 PetscErrorCode ierr; 744 745 PetscFunctionBegin; 746 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 747 ierr = PetscNewLog(part, &p);CHKERRQ(ierr); 748 part->data = p; 749 750 ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr); 751 PetscFunctionReturn(0); 752 } 753 754 #undef __FUNCT__ 755 #define __FUNCT__ "PetscPartitionerShellSetPartition" 756 /*@C 757 PetscPartitionerShellSetPartition - Set an artifical partition for a mesh 758 759 Collective on DM 760 761 Input Parameters: 762 + part - The PetscPartitioner 763 . 
dm - The mesh DM 764 - enlarge - Expand each partition with neighbors 765 766 Level: developer 767 768 .seealso DMPlexDistribute(), PetscPartitionerCreate() 769 @*/ 770 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt numProcs, const PetscInt sizes[], const PetscInt points[]) 771 { 772 PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data; 773 PetscInt proc, numPoints; 774 PetscErrorCode ierr; 775 776 PetscFunctionBegin; 777 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 778 if (sizes) {PetscValidPointer(sizes, 3);} 779 if (sizes) {PetscValidPointer(points, 4);} 780 ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr); 781 ierr = ISDestroy(&p->partition);CHKERRQ(ierr); 782 ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr); 783 ierr = PetscSectionSetChart(p->section, 0, numProcs);CHKERRQ(ierr); 784 if (sizes) { 785 for (proc = 0; proc < numProcs; ++proc) { 786 ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr); 787 } 788 } 789 ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr); 790 ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr); 791 ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr); 792 PetscFunctionReturn(0); 793 } 794 795 #undef __FUNCT__ 796 #define __FUNCT__ "PetscPartitionerDestroy_Chaco" 797 PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part) 798 { 799 PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data; 800 PetscErrorCode ierr; 801 802 PetscFunctionBegin; 803 ierr = PetscFree(p);CHKERRQ(ierr); 804 PetscFunctionReturn(0); 805 } 806 807 #undef __FUNCT__ 808 #define __FUNCT__ "PetscPartitionerView_Chaco_Ascii" 809 PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer) 810 { 811 PetscViewerFormat format; 812 PetscErrorCode ierr; 813 814 PetscFunctionBegin; 815 ierr = 
PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerView_Chaco"
/* Dispatch viewing of a Chaco partitioner; only ASCII viewers are handled */
PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
  if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

#if defined(PETSC_HAVE_CHACO)
#if defined(PETSC_HAVE_UNISTD_H)
#include <unistd.h>
#endif
/* Chaco does not have an include file */
PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
                           float *ewgts, float *x, float *y, float *z, char *outassignname,
                           char *outfilename, short *assignment, int architecture, int ndims_tot,
                           int mesh_dims[3], double *goal, int global_method, int local_method,
                           int rqi_flag, int vmax, int ndims, double eigtol, long seed);

extern int FREE_GRAPH;
#endif

#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerPartition_Chaco"
/* Partition the CSR cell graph with the Chaco library.
   NOTE(review): Chaco uses 1-based indexing, so the adjacency array is shifted
   in place before the call and shifted back afterwards -- confirm downstream. */
PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm comm;
  int nvtxs = numVertices; /* number of vertices in full graph */
  int *vwgts = NULL; /* weights for all vertices */
  float *ewgts = NULL; /* weights for all edges */
  float *x = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char
*outassignname = NULL; /* name of assignment output file */ 861 char *outfilename = NULL; /* output file name */ 862 int architecture = 1; /* 0 => hypercube, d => d-dimensional mesh */ 863 int ndims_tot = 0; /* total number of cube dimensions to divide */ 864 int mesh_dims[3]; /* dimensions of mesh of processors */ 865 double *goal = NULL; /* desired set sizes for each set */ 866 int global_method = 1; /* global partitioning algorithm */ 867 int local_method = 1; /* local partitioning algorithm */ 868 int rqi_flag = 0; /* should I use RQI/Symmlq eigensolver? */ 869 int vmax = 200; /* how many vertices to coarsen down to? */ 870 int ndims = 1; /* number of eigenvectors (2^d sets) */ 871 double eigtol = 0.001; /* tolerance on eigenvectors */ 872 long seed = 123636512; /* for random graph mutations */ 873 short int *assignment; /* Output partition */ 874 int fd_stdout, fd_pipe[2]; 875 PetscInt *points; 876 int i, v, p; 877 PetscErrorCode ierr; 878 879 PetscFunctionBegin; 880 ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr); 881 if (!numVertices) { 882 ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr); 883 ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr); 884 ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr); 885 PetscFunctionReturn(0); 886 } 887 FREE_GRAPH = 0; /* Do not let Chaco free my memory */ 888 for (i = 0; i < start[numVertices]; ++i) ++adjacency[i]; 889 890 if (global_method == INERTIAL_METHOD) { 891 /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */ 892 SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported"); 893 } 894 mesh_dims[0] = nparts; 895 mesh_dims[1] = 1; 896 mesh_dims[2] = 1; 897 ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr); 898 /* Chaco outputs to stdout. We redirect this to a buffer. 
*/ 899 /* TODO: check error codes for UNIX calls */ 900 #if defined(PETSC_HAVE_UNISTD_H) 901 { 902 int piperet; 903 piperet = pipe(fd_pipe); 904 if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe"); 905 fd_stdout = dup(1); 906 close(1); 907 dup2(fd_pipe[1], 1); 908 } 909 #endif 910 ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename, 911 assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag, 912 vmax, ndims, eigtol, seed); 913 #if defined(PETSC_HAVE_UNISTD_H) 914 { 915 char msgLog[10000]; 916 int count; 917 918 fflush(stdout); 919 count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char)); 920 if (count < 0) count = 0; 921 msgLog[count] = 0; 922 close(1); 923 dup2(fd_stdout, 1); 924 close(fd_stdout); 925 close(fd_pipe[0]); 926 close(fd_pipe[1]); 927 if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog); 928 } 929 #endif 930 /* Convert to PetscSection+IS */ 931 ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr); 932 for (v = 0; v < nvtxs; ++v) { 933 ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr); 934 } 935 ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr); 936 ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr); 937 for (p = 0, i = 0; p < nparts; ++p) { 938 for (v = 0; v < nvtxs; ++v) { 939 if (assignment[v] == p) points[i++] = v; 940 } 941 } 942 if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs); 943 ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr); 944 if (global_method == INERTIAL_METHOD) { 945 /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */ 946 } 947 ierr = PetscFree(assignment);CHKERRQ(ierr); 948 for (i = 0; i < start[numVertices]; ++i) --adjacency[i]; 949 PetscFunctionReturn(0); 950 #else 951 SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease 
reconfigure with --download-chaco."); 952 #endif 953 } 954 955 #undef __FUNCT__ 956 #define __FUNCT__ "PetscPartitionerInitialize_Chaco" 957 PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part) 958 { 959 PetscFunctionBegin; 960 part->ops->view = PetscPartitionerView_Chaco; 961 part->ops->destroy = PetscPartitionerDestroy_Chaco; 962 part->ops->partition = PetscPartitionerPartition_Chaco; 963 PetscFunctionReturn(0); 964 } 965 966 /*MC 967 PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library 968 969 Level: intermediate 970 971 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType() 972 M*/ 973 974 #undef __FUNCT__ 975 #define __FUNCT__ "PetscPartitionerCreate_Chaco" 976 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part) 977 { 978 PetscPartitioner_Chaco *p; 979 PetscErrorCode ierr; 980 981 PetscFunctionBegin; 982 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 983 ierr = PetscNewLog(part, &p);CHKERRQ(ierr); 984 part->data = p; 985 986 ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr); 987 ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr); 988 PetscFunctionReturn(0); 989 } 990 991 #undef __FUNCT__ 992 #define __FUNCT__ "PetscPartitionerDestroy_ParMetis" 993 PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part) 994 { 995 PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data; 996 PetscErrorCode ierr; 997 998 PetscFunctionBegin; 999 ierr = PetscFree(p);CHKERRQ(ierr); 1000 PetscFunctionReturn(0); 1001 } 1002 1003 #undef __FUNCT__ 1004 #define __FUNCT__ "PetscPartitionerView_ParMetis_Ascii" 1005 PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer) 1006 { 1007 PetscViewerFormat format; 1008 PetscErrorCode ierr; 1009 1010 PetscFunctionBegin; 1011 ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr); 1012 ierr = 
PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr); 1013 PetscFunctionReturn(0); 1014 } 1015 1016 #undef __FUNCT__ 1017 #define __FUNCT__ "PetscPartitionerView_ParMetis" 1018 PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer) 1019 { 1020 PetscBool iascii; 1021 PetscErrorCode ierr; 1022 1023 PetscFunctionBegin; 1024 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 1025 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2); 1026 ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr); 1027 if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);} 1028 PetscFunctionReturn(0); 1029 } 1030 1031 #if defined(PETSC_HAVE_PARMETIS) 1032 #include <parmetis.h> 1033 #endif 1034 1035 #undef __FUNCT__ 1036 #define __FUNCT__ "PetscPartitionerPartition_ParMetis" 1037 PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition) 1038 { 1039 #if defined(PETSC_HAVE_PARMETIS) 1040 MPI_Comm comm; 1041 PetscInt nvtxs = numVertices; /* The number of vertices in full graph */ 1042 PetscInt *vtxdist; /* Distribution of vertices across processes */ 1043 PetscInt *xadj = start; /* Start of edge list for each vertex */ 1044 PetscInt *adjncy = adjacency; /* Edge lists for all vertices */ 1045 PetscInt *vwgt = NULL; /* Vertex weights */ 1046 PetscInt *adjwgt = NULL; /* Edge weights */ 1047 PetscInt wgtflag = 0; /* Indicates which weights are present */ 1048 PetscInt numflag = 0; /* Indicates initial offset (0 or 1) */ 1049 PetscInt ncon = 1; /* The number of weights per vertex */ 1050 PetscReal *tpwgts; /* The fraction of vertex weights assigned to each partition */ 1051 PetscReal *ubvec; /* The balance intolerance for vertex weights */ 1052 PetscInt options[5]; /* Options */ 1053 /* Outputs */ 1054 PetscInt edgeCut; 
/* The number of edges cut by the partition */ 1055 PetscInt *assignment, *points; 1056 PetscMPIInt rank, p, v, i; 1057 PetscErrorCode ierr; 1058 1059 PetscFunctionBegin; 1060 ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr); 1061 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 1062 options[0] = 0; /* Use all defaults */ 1063 /* Calculate vertex distribution */ 1064 ierr = PetscMalloc4(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment);CHKERRQ(ierr); 1065 vtxdist[0] = 0; 1066 ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr); 1067 for (p = 2; p <= nparts; ++p) { 1068 vtxdist[p] += vtxdist[p-1]; 1069 } 1070 /* Calculate weights */ 1071 for (p = 0; p < nparts; ++p) { 1072 tpwgts[p] = 1.0/nparts; 1073 } 1074 ubvec[0] = 1.05; 1075 1076 if (nparts == 1) { 1077 ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt)); 1078 } else { 1079 if (vtxdist[1] == vtxdist[nparts]) { 1080 if (!rank) { 1081 PetscStackPush("METIS_PartGraphKway"); 1082 ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment); 1083 PetscStackPop; 1084 if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()"); 1085 } 1086 } else { 1087 PetscStackPush("ParMETIS_V3_PartKway"); 1088 ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm); 1089 PetscStackPop; 1090 if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()"); 1091 } 1092 } 1093 /* Convert to PetscSection+IS */ 1094 ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr); 1095 for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);} 1096 ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr); 1097 ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr); 1098 for (p = 0, i = 0; p < 
nparts; ++p) { 1099 for (v = 0; v < nvtxs; ++v) { 1100 if (assignment[v] == p) points[i++] = v; 1101 } 1102 } 1103 if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs); 1104 ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr); 1105 ierr = PetscFree4(vtxdist,tpwgts,ubvec,assignment);CHKERRQ(ierr); 1106 #else 1107 SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis."); 1108 #endif 1109 PetscFunctionReturn(0); 1110 } 1111 1112 #undef __FUNCT__ 1113 #define __FUNCT__ "PetscPartitionerInitialize_ParMetis" 1114 PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part) 1115 { 1116 PetscFunctionBegin; 1117 part->ops->view = PetscPartitionerView_ParMetis; 1118 part->ops->destroy = PetscPartitionerDestroy_ParMetis; 1119 part->ops->partition = PetscPartitionerPartition_ParMetis; 1120 PetscFunctionReturn(0); 1121 } 1122 1123 /*MC 1124 PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library 1125 1126 Level: intermediate 1127 1128 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType() 1129 M*/ 1130 1131 #undef __FUNCT__ 1132 #define __FUNCT__ "PetscPartitionerCreate_ParMetis" 1133 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part) 1134 { 1135 PetscPartitioner_ParMetis *p; 1136 PetscErrorCode ierr; 1137 1138 PetscFunctionBegin; 1139 PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1); 1140 ierr = PetscNewLog(part, &p);CHKERRQ(ierr); 1141 part->data = p; 1142 1143 ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr); 1144 ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr); 1145 PetscFunctionReturn(0); 1146 } 1147 1148 #undef __FUNCT__ 1149 #define __FUNCT__ "DMPlexGetPartitioner" 1150 /*@ 1151 DMPlexGetPartitioner - Get the 
mesh partitioner

  Not collective

  Input Parameter:
. dm - The DM

  Output Parameter:
. part - The PetscPartitioner

  Level: developer

.seealso DMPlexDistribute(), PetscPartitionerCreate()
@*/
PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(part, 2);
  /* Return the partitioner stored on the Plex; no new reference is taken */
  *part = mesh->partitioner;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexMarkTreeClosure"
/* Add the transitive closure of point's tree parent (if any) to the partition
   buffer segpart, using the bitset bt to skip points already recorded, and
   recursing so ancestors' closures are included as well.  *partSize is
   incremented once for each newly recorded point. */
static PetscErrorCode DMPlexMarkTreeClosure(DM dm, PetscSegBuffer segpart, PetscBT bt, PetscInt point, PetscInt *partSize)
{
  PetscInt       parent, closureSize, *closure = NULL, i, pStart, pEnd;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
  /* A point that is its own parent is not a tree child: nothing to add */
  if (parent == point) PetscFunctionReturn(0);
  ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr);
  ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
  for (i = 0; i < closureSize; i++) {
    PetscInt cpoint = closure[2*i]; /* closure stores (point, orientation) pairs */

    if (!PetscBTLookupSet(bt,cpoint-pStart)) {
      PetscInt *PETSC_RESTRICT pt;
      (*partSize)++;
      ierr = PetscSegBufferGetInts(segpart,1,&pt);CHKERRQ(ierr);
      *pt = cpoint;
    }
    ierr = DMPlexMarkTreeClosure(dm,segpart,bt,cpoint,partSize);CHKERRQ(ierr);
  }
  ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexCreatePartitionClosure"
/* Given a partition of points (pointSection/pointPartition, one chunk per target
   rank), build a new section/IS in which each rank's chunk is expanded to the
   transitive closure of its points (plus tree-parent closures), deduplicated
   per rank and sorted. */
PetscErrorCode DMPlexCreatePartitionClosure(DM dm, PetscSection pointSection, IS pointPartition, PetscSection *section, IS *partition)
{
  /* const PetscInt height = 0; */
  const PetscInt *partArray;
  PetscInt       *allPoints, *packPoints;
  PetscInt        rStart, rEnd, rank, pStart, pEnd, newSize;
  PetscErrorCode  ierr;
  PetscBT         bt;
  PetscSegBuffer  segpack,segpart;

  PetscFunctionBegin;
  ierr = PetscSectionGetChart(pointSection, &rStart, &rEnd);CHKERRQ(ierr);
  ierr = ISGetIndices(pointPartition, &partArray);CHKERRQ(ierr);
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dm), section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(*section, rStart, rEnd);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr);
  /* bt marks points already recorded for the current rank's chunk */
  ierr = PetscBTCreate(pEnd-pStart,&bt);CHKERRQ(ierr);
  /* segpack accumulates all chunks; segpart holds the current chunk only */
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&segpack);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&segpart);CHKERRQ(ierr);
  for (rank = rStart; rank < rEnd; ++rank) {
    PetscInt partSize = 0, numPoints, offset, p, *PETSC_RESTRICT placePoints;

    ierr = PetscSectionGetDof(pointSection, rank, &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(pointSection, rank, &offset);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      PetscInt  point = partArray[offset+p], closureSize, c;
      PetscInt *closure = NULL;

      /* TODO Include support for height > 0 case */
      ierr = DMPlexGetTransitiveClosure(dm, point, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      for (c=0; c<closureSize; c++) {
        PetscInt cpoint = closure[c*2];
        if (!PetscBTLookupSet(bt,cpoint-pStart)) {
          PetscInt *PETSC_RESTRICT pt;
          partSize++;
          ierr = PetscSegBufferGetInts(segpart,1,&pt);CHKERRQ(ierr);
          *pt = cpoint;
        }
        /* Also pick up closures of tree parents for non-conforming meshes */
        ierr = DMPlexMarkTreeClosure(dm,segpart,bt,cpoint,&partSize);CHKERRQ(ierr);
      }
      ierr = DMPlexRestoreTransitiveClosure(dm, point, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
    }
    ierr = PetscSectionSetDof(*section, rank, partSize);CHKERRQ(ierr);
    /* Move this rank's (sorted) chunk into the accumulating buffer */
    ierr = PetscSegBufferGetInts(segpack,partSize,&placePoints);CHKERRQ(ierr);
    ierr = PetscSegBufferExtractTo(segpart,placePoints);CHKERRQ(ierr);
    ierr = PetscSortInt(partSize,placePoints);CHKERRQ(ierr);
    /* Reset the bitset for the next rank without reallocating it */
    for (p=0; p<partSize; p++) {ierr = PetscBTClear(bt,placePoints[p]-pStart);CHKERRQ(ierr);}
  }
  ierr = PetscBTDestroy(&bt);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&segpart);CHKERRQ(ierr);

  ierr = PetscSectionSetUp(*section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(*section, &newSize);CHKERRQ(ierr);
  ierr = PetscMalloc1(newSize, &allPoints);CHKERRQ(ierr);

  /* Copy the packed chunks into the final array at section offsets */
  ierr = PetscSegBufferExtractInPlace(segpack,&packPoints);CHKERRQ(ierr);
  for (rank = rStart; rank < rEnd; ++rank) {
    PetscInt numPoints, offset;

    ierr = PetscSectionGetDof(*section, rank, &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(*section, rank, &offset);CHKERRQ(ierr);
    ierr = PetscMemcpy(&allPoints[offset], packPoints, numPoints * sizeof(PetscInt));CHKERRQ(ierr);
    packPoints += numPoints;
  }

  ierr = PetscSegBufferDestroy(&segpack);CHKERRQ(ierr);
  ierr = ISRestoreIndices(pointPartition, &partArray);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), newSize, allPoints, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelClosure"
/*@
  DMPlexPartitionLabelClosure - Add the closure of all points to the partition label

  Input Parameters:
+ dm - The DM
- label - DMLabel assigning ranks to remote roots

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
{
  IS              rankIS, pointIS;
  const PetscInt *ranks, *points;
  PetscInt        numRanks, numPoints, r, p, c, closureSize;
  PetscInt       *closure = NULL;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr =
DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr); 1299 ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr); 1300 ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr); 1301 for (r = 0; r < numRanks; ++r) { 1302 const PetscInt rank = ranks[r]; 1303 1304 ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr); 1305 ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr); 1306 ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr); 1307 for (p = 0; p < numPoints; ++p) { 1308 ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr); 1309 for (c = 0; c < closureSize*2; c += 2) {ierr = DMLabelSetValue(label, closure[c], rank);CHKERRQ(ierr);} 1310 } 1311 ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr); 1312 ierr = ISDestroy(&pointIS);CHKERRQ(ierr); 1313 } 1314 if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);} 1315 ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr); 1316 ierr = ISDestroy(&rankIS);CHKERRQ(ierr); 1317 PetscFunctionReturn(0); 1318 } 1319 1320 #undef __FUNCT__ 1321 #define __FUNCT__ "DMPlexPartitionLabelAdjacency" 1322 /*@ 1323 DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label 1324 1325 Input Parameters: 1326 + dm - The DM 1327 - label - DMLabel assinging ranks to remote roots 1328 1329 Level: developer 1330 1331 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap 1332 @*/ 1333 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label) 1334 { 1335 IS rankIS, pointIS; 1336 const PetscInt *ranks, *points; 1337 PetscInt numRanks, numPoints, r, p, a, adjSize; 1338 PetscInt *adj = NULL; 1339 PetscErrorCode ierr; 1340 1341 PetscFunctionBegin; 1342 ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr); 1343 ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr); 1344 ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr); 1345 for (r = 0; r < numRanks; ++r) { 1346 const 
PetscInt rank = ranks[r]; 1347 1348 ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr); 1349 ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr); 1350 ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr); 1351 for (p = 0; p < numPoints; ++p) { 1352 adjSize = PETSC_DETERMINE; 1353 ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr); 1354 for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);} 1355 } 1356 ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr); 1357 ierr = ISDestroy(&pointIS);CHKERRQ(ierr); 1358 } 1359 ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr); 1360 ierr = ISDestroy(&rankIS);CHKERRQ(ierr); 1361 ierr = PetscFree(adj);CHKERRQ(ierr); 1362 PetscFunctionReturn(0); 1363 } 1364 1365 #undef __FUNCT__ 1366 #define __FUNCT__ "DMPlexPartitionLabelInvert" 1367 /*@ 1368 DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label 1369 1370 Input Parameters: 1371 + dm - The DM 1372 . rootLabel - DMLabel assinging ranks to local roots 1373 . processSF - A star forest mapping into the local index on each remote rank 1374 1375 Output Parameter: 1376 - leafLabel - DMLabel assinging ranks to remote roots 1377 1378 Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The 1379 resulting leafLabel is a receiver mapping of remote roots to their parent rank. 
1380 1381 Level: developer 1382 1383 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap 1384 @*/ 1385 PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel) 1386 { 1387 MPI_Comm comm; 1388 PetscMPIInt rank, numProcs; 1389 PetscInt p, n, numNeighbors, size, l, nleaves; 1390 PetscSF sfPoint; 1391 PetscSFNode *rootPoints, *leafPoints; 1392 PetscSection rootSection, leafSection; 1393 const PetscSFNode *remote; 1394 const PetscInt *local, *neighbors; 1395 IS valueIS; 1396 PetscErrorCode ierr; 1397 1398 PetscFunctionBegin; 1399 ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr); 1400 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 1401 ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr); 1402 ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr); 1403 1404 /* Convert to (point, rank) and use actual owners */ 1405 ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr); 1406 ierr = PetscSectionSetChart(rootSection, 0, numProcs);CHKERRQ(ierr); 1407 ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr); 1408 ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr); 1409 ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr); 1410 for (n = 0; n < numNeighbors; ++n) { 1411 PetscInt numPoints; 1412 1413 ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr); 1414 ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr); 1415 } 1416 ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr); 1417 ierr = PetscSectionGetStorageSize(rootSection, &size);CHKERRQ(ierr); 1418 ierr = PetscMalloc1(size, &rootPoints);CHKERRQ(ierr); 1419 ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr); 1420 for (n = 0; n < numNeighbors; ++n) { 1421 IS pointIS; 1422 const PetscInt *points; 1423 PetscInt off, numPoints, p; 1424 1425 ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr); 1426 ierr = 
DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* If the point is a leaf of the point SF, record its true owner;
         otherwise this rank owns it */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Filter remote contributions (ovLeafPoints) into the overlapSF */
  ierr = PetscSectionGetStorageSize(leafSection, &size);CHKERRQ(ierr);
  for (p = 0; p < size; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelCreateSF"
/*@
  DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points

  Input Parameters:
+ dm - The DM
- label - DMLabel assigning ranks to remote roots

  Output Parameter:
. sf - The star forest communication context encapsulating the defined mapping

  Note: The incoming label is a receiver mapping of remote points to their parent rank.

  Level: developer

.seealso: DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
{
  PetscMPIInt    numProcs;
  PetscInt       n, idx, numRemote, p, numPoints, pStart, pEnd;
  PetscSFNode   *remotePoints;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);

  /* First pass: count remote points so the array can be allocated in one shot */
  for (numRemote = 0, n = 0; n < numProcs; ++n) {
    ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
    numRemote += numPoints;
  }
  ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
  /* Second pass: fill in (point, rank) pairs, grouped by rank */
  for (idx = 0, n = 0; n < numProcs; ++n) {
    IS              remoteRootIS;
    const PetscInt *remoteRoots;

    ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
    if (numPoints <= 0) continue;
    ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
    ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    for (p = 0; p < numPoints; p++) {
      remotePoints[idx].index = remoteRoots[p];
      remotePoints[idx].rank = n;
      idx++;
    }
    ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
  }
  ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  /* NULL ilocal: leaves are the contiguous range [0, numRemote) */
  ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}