xref: /petsc/src/dm/impls/plex/plexpartition.c (revision cc12936ac9377d1b0cd35ec09a88dcc1b6e8f816) !
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/hashseti.h>
3 
/* Class id for PetscPartitioner objects; assigned when the DM package is initialized */
PetscClassId PETSCPARTITIONER_CLASSID = 0;

/* Registry of partitioner implementations, and whether the built-in ones were registered */
PetscFunctionList PetscPartitionerList              = NULL;
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
8 
9 PetscBool ChacoPartitionercite = PETSC_FALSE;
/* BibTeX entry registered (via PetscCitationsRegister) when the Chaco partitioner is used.
   Fix: the booktitle line was missing its trailing "\n", fusing it with the isbn line. */
const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
                               "  author    = {Bruce Hendrickson and Robert Leland},\n"
                               "  title     = {A multilevel algorithm for partitioning graphs},\n"
                               "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},\n"
                               "  isbn      = {0-89791-816-9},\n"
                               "  pages     = {28},\n"
                               "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
                               "  publisher = {ACM Press},\n"
                               "  address   = {New York},\n"
                               "  year      = {1995}\n}\n";
20 
/* BibTeX entry registered (via PetscCitationsRegister) when the ParMetis partitioner is used */
PetscBool ParMetisPartitionercite = PETSC_FALSE;
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";
29 
30 /*@C
31   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
32 
33   Input Parameters:
+ dm      - The mesh DM
35 - height  - Height of the strata from which to construct the graph
36 
37   Output Parameter:
38 + numVertices     - Number of vertices in the graph
39 . offsets         - Point offsets in the graph
40 . adjacency       - Point connectivity in the graph
41 - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.
42 
  The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
  DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
  representation on the mesh.
46 
47   Level: developer
48 
49 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
50 @*/
PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
{
  PetscInt       dim, depth, p, pStart, pEnd, a, adjSize, idx, size;
  PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
  IS             cellNumbering;
  const PetscInt *cellNum;
  PetscBool      useCone, useClosure;
  PetscSection   section;
  PetscSegBuffer adjBuffer;
  PetscSF        sfPoint;
  PetscInt       *adjCells = NULL, *remoteCells = NULL;
  const PetscInt *local;
  PetscInt       nroots, nleaves, l;
  PetscMPIInt    rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  if (dim != depth) {
    /* We do not handle the uninterpolated case here */
    ierr = DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);CHKERRQ(ierr);
    /* DMPlexCreateNeighborCSR does not make a numbering */
    if (globalNumbering) {ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);CHKERRQ(ierr);}
    /* Different behavior for empty graphs */
    if (!*numVertices) {
      ierr = PetscMalloc1(1, offsets);CHKERRQ(ierr);
      (*offsets)[0] = 0;
    }
    /* Broken in parallel */
    if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported");
    PetscFunctionReturn(0);
  }
  ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* NOTE(review): nroots queried here is overwritten by the dm->sf query below —
     presumably sfPoint and dm->sf are the same SF; confirm before simplifying */
  ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
  /* Build adjacency graph via a section/segbuffer */
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
  /* Always use FVM adjacency to create partitioner graph: save the user's
     settings, force cone/no-closure, and restore them once the graph is built */
  ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
  ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);CHKERRQ(ierr);
  if (globalNumbering) {
    /* Hand an extra reference to the caller; our own reference is destroyed below */
    ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
    *globalNumbering = cellNumbering;
  }
  ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  /* For all boundary faces (including faces adjacent to a ghost cell), record the local cell in adjCells
     Broadcast adjCells to remoteCells (to get cells from roots) and Reduce adjCells to remoteCells (to get cells from leaves)
   */
  ierr = PetscSFGetGraph(dm->sf, &nroots, &nleaves, &local, NULL);CHKERRQ(ierr);
  if (nroots >= 0) {
    PetscInt fStart, fEnd, f;

    ierr = PetscCalloc2(nroots, &adjCells, nroots, &remoteCells);CHKERRQ(ierr);
    ierr = DMPlexGetHeightStratum(dm, 1, &fStart, &fEnd);CHKERRQ(ierr);
    /* -3 is a sentinel meaning "no adjacent cell recorded for this point" */
    for (l = 0; l < nroots; ++l) adjCells[l] = -3;
    for (f = fStart; f < fEnd; ++f) {
      const PetscInt *support;
      PetscInt        supportSize;

      ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
      ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
      /* Boundary face: record its single cell */
      if (supportSize == 1) adjCells[f] = cellNum[support[0]];
      else if (supportSize == 2) {
        /* Interior face where one side is a leaf (ghost): record the other side */
        ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = cellNum[support[1]];
        ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = cellNum[support[0]];
      }
    }
    /* -1 marks faces with no remote neighbor after the exchange */
    for (l = 0; l < nroots; ++l) remoteCells[l] = -1;
    ierr = PetscSFBcastBegin(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
  }
  /* Combine local and global adjacencies */
  for (*numVertices = 0, p = pStart; p < pEnd; p++) {
    const PetscInt *cone;
    PetscInt        coneSize, c;

    /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    /* Add remote cells */
    if (remoteCells) {
      ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
      ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
      for (c = 0; c < coneSize; ++c) {
        if (remoteCells[cone[c]] != -1) {
          PetscInt *PETSC_RESTRICT pBuf;

          ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
          ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
          *pBuf = remoteCells[cone[c]];
        }
      }
    }
    /* Add local cells */
    adjSize = PETSC_DETERMINE;
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      const PetscInt point = adj[a];
      /* Keep only other cells in this stratum; self-edges are dropped */
      if (point != p && pStart <= point && point < pEnd) {
        PetscInt *PETSC_RESTRICT pBuf;
        ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
        ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
        *pBuf = cellNum[point];
      }
    }
    (*numVertices)++;
  }
  ierr = PetscFree2(adjCells, remoteCells);CHKERRQ(ierr);
  /* Restore the user's adjacency settings */
  ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
  /* Derive CSR graph from section/segbuffer */
  ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
  for (idx = 0, p = pStart; p < pEnd; p++) {
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
  }
  vOffsets[*numVertices] = size;
  if (offsets) *offsets = vOffsets;
  ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
  ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
  if (adjacency) *adjacency = graph;
  /* Clean up */
  ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
  ierr = PetscFree(adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
191 
192 /*@C
193   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
194 
195   Collective
196 
197   Input Arguments:
198 + dm - The DMPlex
199 - cellHeight - The height of mesh points to treat as cells (default should be 0)
200 
201   Output Arguments:
202 + numVertices - The number of local vertices in the graph, or cells in the mesh.
203 . offsets     - The offset to the adjacency list for each cell
204 - adjacency   - The adjacency list for all cells
205 
206   Note: This is suitable for input to a mesh partitioner like ParMetis.
207 
208   Level: advanced
209 
210 .seealso: DMPlexCreate()
211 @*/
212 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
213 {
214   const PetscInt maxFaceCases = 30;
215   PetscInt       numFaceCases = 0;
216   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
217   PetscInt      *off, *adj;
218   PetscInt      *neighborCells = NULL;
219   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
220   PetscErrorCode ierr;
221 
222   PetscFunctionBegin;
223   /* For parallel partitioning, I think you have to communicate supports */
224   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
225   cellDim = dim - cellHeight;
226   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
227   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
228   if (cEnd - cStart == 0) {
229     if (numVertices) *numVertices = 0;
230     if (offsets)   *offsets   = NULL;
231     if (adjacency) *adjacency = NULL;
232     PetscFunctionReturn(0);
233   }
234   numCells  = cEnd - cStart;
235   faceDepth = depth - cellHeight;
236   if (dim == depth) {
237     PetscInt f, fStart, fEnd;
238 
239     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
240     /* Count neighboring cells */
241     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
242     for (f = fStart; f < fEnd; ++f) {
243       const PetscInt *support;
244       PetscInt        supportSize;
245       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
246       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
247       if (supportSize == 2) {
248         PetscInt numChildren;
249 
250         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
251         if (!numChildren) {
252           ++off[support[0]-cStart+1];
253           ++off[support[1]-cStart+1];
254         }
255       }
256     }
257     /* Prefix sum */
258     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
259     if (adjacency) {
260       PetscInt *tmp;
261 
262       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
263       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
264       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
265       /* Get neighboring cells */
266       for (f = fStart; f < fEnd; ++f) {
267         const PetscInt *support;
268         PetscInt        supportSize;
269         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
270         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
271         if (supportSize == 2) {
272           PetscInt numChildren;
273 
274           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
275           if (!numChildren) {
276             adj[tmp[support[0]-cStart]++] = support[1];
277             adj[tmp[support[1]-cStart]++] = support[0];
278           }
279         }
280       }
281 #if defined(PETSC_USE_DEBUG)
282       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
283 #endif
284       ierr = PetscFree(tmp);CHKERRQ(ierr);
285     }
286     if (numVertices) *numVertices = numCells;
287     if (offsets)   *offsets   = off;
288     if (adjacency) *adjacency = adj;
289     PetscFunctionReturn(0);
290   }
291   /* Setup face recognition */
292   if (faceDepth == 1) {
293     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
294 
295     for (c = cStart; c < cEnd; ++c) {
296       PetscInt corners;
297 
298       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
299       if (!cornersSeen[corners]) {
300         PetscInt nFV;
301 
302         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
303         cornersSeen[corners] = 1;
304 
305         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
306 
307         numFaceVertices[numFaceCases++] = nFV;
308       }
309     }
310   }
311   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
312   /* Count neighboring cells */
313   for (cell = cStart; cell < cEnd; ++cell) {
314     PetscInt numNeighbors = PETSC_DETERMINE, n;
315 
316     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
317     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
318     for (n = 0; n < numNeighbors; ++n) {
319       PetscInt        cellPair[2];
320       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
321       PetscInt        meetSize = 0;
322       const PetscInt *meet    = NULL;
323 
324       cellPair[0] = cell; cellPair[1] = neighborCells[n];
325       if (cellPair[0] == cellPair[1]) continue;
326       if (!found) {
327         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
328         if (meetSize) {
329           PetscInt f;
330 
331           for (f = 0; f < numFaceCases; ++f) {
332             if (numFaceVertices[f] == meetSize) {
333               found = PETSC_TRUE;
334               break;
335             }
336           }
337         }
338         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
339       }
340       if (found) ++off[cell-cStart+1];
341     }
342   }
343   /* Prefix sum */
344   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
345 
346   if (adjacency) {
347     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
348     /* Get neighboring cells */
349     for (cell = cStart; cell < cEnd; ++cell) {
350       PetscInt numNeighbors = PETSC_DETERMINE, n;
351       PetscInt cellOffset   = 0;
352 
353       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
354       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
355       for (n = 0; n < numNeighbors; ++n) {
356         PetscInt        cellPair[2];
357         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
358         PetscInt        meetSize = 0;
359         const PetscInt *meet    = NULL;
360 
361         cellPair[0] = cell; cellPair[1] = neighborCells[n];
362         if (cellPair[0] == cellPair[1]) continue;
363         if (!found) {
364           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
365           if (meetSize) {
366             PetscInt f;
367 
368             for (f = 0; f < numFaceCases; ++f) {
369               if (numFaceVertices[f] == meetSize) {
370                 found = PETSC_TRUE;
371                 break;
372               }
373             }
374           }
375           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
376         }
377         if (found) {
378           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
379           ++cellOffset;
380         }
381       }
382     }
383   }
384   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
385   if (numVertices) *numVertices = numCells;
386   if (offsets)   *offsets   = off;
387   if (adjacency) *adjacency = adj;
388   PetscFunctionReturn(0);
389 }
390 
391 /*@C
392   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
393 
394   Not Collective
395 
396   Input Parameters:
397 + name        - The name of a new user-defined creation routine
398 - create_func - The creation routine itself
399 
400   Notes:
401   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
402 
403   Sample usage:
404 .vb
405     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
406 .ve
407 
408   Then, your PetscPartitioner type can be chosen with the procedural interface via
409 .vb
410     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
411     PetscPartitionerSetType(PetscPartitioner, "my_part");
412 .ve
413    or at runtime via the option
414 .vb
415     -petscpartitioner_type my_part
416 .ve
417 
418   Level: advanced
419 
420 .keywords: PetscPartitioner, register
421 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
422 
423 @*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Record the creation routine under the given name in the global registry,
     so PetscPartitionerSetType() can find it later */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
432 
433 /*@C
434   PetscPartitionerSetType - Builds a particular PetscPartitioner
435 
436   Collective on PetscPartitioner
437 
438   Input Parameters:
439 + part - The PetscPartitioner object
440 - name - The kind of partitioner
441 
442   Options Database Key:
443 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
444 
445   Level: intermediate
446 
447 .keywords: PetscPartitioner, set, type
448 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
449 @*/
PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
{
  PetscErrorCode (*r)(PetscPartitioner);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* No-op when the object already has the requested type */
  ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  /* Ensure built-in types are registered before looking up the constructor */
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);

  /* Tear down the previous implementation, clear its ops table, then build the new one */
  if (part->ops->destroy) {
    ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
  }
  ierr = PetscMemzero(part->ops, sizeof(struct _PetscPartitionerOps));CHKERRQ(ierr);
  ierr = (*r)(part);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
473 
474 /*@C
475   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
476 
477   Not Collective
478 
479   Input Parameter:
480 . part - The PetscPartitioner
481 
482   Output Parameter:
483 . name - The PetscPartitioner type name
484 
485   Level: intermediate
486 
487 .keywords: PetscPartitioner, get, type, name
488 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
489 @*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  /* Returns the header's type name; NULL if no type has been set */
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
501 
502 /*@C
503   PetscPartitionerView - Views a PetscPartitioner
504 
505   Collective on PetscPartitioner
506 
  Input Parameters:
508 + part - the PetscPartitioner object to view
509 - v    - the viewer
510 
511   Level: developer
512 
513 .seealso: PetscPartitionerDestroy()
514 @*/
PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
{
  PetscMPIInt    size;
  PetscBool      isascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Default to stdout on the partitioner's communicator when no viewer is given */
  if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
  ierr = PetscObjectTypeCompare((PetscObject) v, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
  if (isascii) {
    ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v, "Graph Partitioner: %d MPI Process%s\n", size, size > 1 ? "es" : "");CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v, "  type: %s\n", part->hdr.type_name);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPushTab(v);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v, "edge cut: %D\n", part->edgeCut);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v, "balance:  %.2g\n", part->balance);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPopTab(v);CHKERRQ(ierr);
  }
  /* Let the implementation append type-specific output */
  if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
537 
538 static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType)
539 {
540   PetscFunctionBegin;
541   if (!currentType) {
542 #if defined(PETSC_HAVE_CHACO)
543     *defaultType = PETSCPARTITIONERCHACO;
544 #elif defined(PETSC_HAVE_PARMETIS)
545     *defaultType = PETSCPARTITIONERPARMETIS;
546 #elif defined(PETSC_HAVE_PTSCOTCH)
547     *defaultType = PETSCPARTITIONERPTSCOTCH;
548 #else
549     *defaultType = PETSCPARTITIONERSIMPLE;
550 #endif
551   } else {
552     *defaultType = currentType;
553   }
554   PetscFunctionReturn(0);
555 }
556 
557 /*@
558   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
559 
560   Collective on PetscPartitioner
561 
562   Input Parameter:
563 . part - the PetscPartitioner object to set options for
564 
565   Level: developer
566 
567 .seealso: PetscPartitionerView()
568 @*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  const char    *defaultType;
  char           name[256];
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  ierr = PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);CHKERRQ(ierr);
  /* NOTE(review): PetscObjectOptionsBegin is a macro that presumably declares the
     PetscOptionsObject used below and paired with PetscOptionsEnd — confirm in petscoptions.h */
  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, sizeof(name), &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
  } else if (!((PetscObject) part)->type_name) {
    /* No option given and no type set yet: install the default type */
    ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
  }
  /* Give the implementation a chance to read its own options */
  if (part->ops->setfromoptions) {
    ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);
  }
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
595 
596 /*@C
597   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
598 
599   Collective on PetscPartitioner
600 
601   Input Parameter:
602 . part - the PetscPartitioner object to setup
603 
604   Level: developer
605 
606 .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
607 @*/
PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Setup is delegated entirely to the implementation; a no-op if none is provided */
  if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
617 
618 /*@
619   PetscPartitionerDestroy - Destroys a PetscPartitioner object
620 
621   Collective on PetscPartitioner
622 
623   Input Parameter:
624 . part - the PetscPartitioner object to destroy
625 
626   Level: developer
627 
628 .seealso: PetscPartitionerView()
629 @*/
PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*part) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);

  /* If other references remain, only drop ours and null the caller's pointer */
  if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
  ((PetscObject) (*part))->refct = 0;

  /* Implementation-specific teardown first, then the common header */
  if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
  ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
645 
646 /*@
647   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
648 
649   Collective on MPI_Comm
650 
651   Input Parameter:
652 . comm - The communicator for the PetscPartitioner object
653 
654   Output Parameter:
655 . part - The PetscPartitioner object
656 
657   Level: beginner
658 
659 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
660 @*/
PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
{
  PetscPartitioner p;
  const char       *partitionerType = NULL;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidPointer(part, 2);
  *part = NULL;
  /* Ensure the DM package (and PETSCPARTITIONER_CLASSID) is initialized */
  ierr = DMInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
  /* Install the default type immediately so the new object is usable as created */
  ierr = PetscPartitionerGetDefaultType(NULL,&partitionerType);CHKERRQ(ierr);
  ierr = PetscPartitionerSetType(p,partitionerType);CHKERRQ(ierr);

  p->edgeCut = 0;
  p->balance = 0.0;

  *part = p;
  PetscFunctionReturn(0);
}
682 
683 /*@
684   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
685 
686   Collective on DM
687 
688   Input Parameters:
689 + part    - The PetscPartitioner
690 - dm      - The mesh DM
691 
692   Output Parameters:
693 + partSection     - The PetscSection giving the division of points by partition
694 - partition       - The list of points by partition
695 
696   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
697 
698   Level: developer
699 
700 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
701 @*/
702 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
703 {
704   PetscMPIInt    size;
705   PetscErrorCode ierr;
706 
707   PetscFunctionBegin;
708   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
709   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
710   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
711   PetscValidPointer(partition, 5);
712   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
713   if (size == 1) {
714     PetscInt *points;
715     PetscInt  cStart, cEnd, c;
716 
717     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
718     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
719     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
720     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
721     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
722     for (c = cStart; c < cEnd; ++c) points[c] = c;
723     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
724     PetscFunctionReturn(0);
725   }
726   if (part->height == 0) {
727     PetscInt numVertices;
728     PetscInt *start     = NULL;
729     PetscInt *adjacency = NULL;
730     IS       globalNumbering;
731 
732     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
733     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
734     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
735     ierr = PetscFree(start);CHKERRQ(ierr);
736     ierr = PetscFree(adjacency);CHKERRQ(ierr);
737     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
738       const PetscInt *globalNum;
739       const PetscInt *partIdx;
740       PetscInt *map, cStart, cEnd;
741       PetscInt *adjusted, i, localSize, offset;
742       IS    newPartition;
743 
744       ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
745       ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
746       ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
747       ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
748       ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
749       ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
750       for (i = cStart, offset = 0; i < cEnd; i++) {
751         if (globalNum[i - cStart] >= 0) map[offset++] = i;
752       }
753       for (i = 0; i < localSize; i++) {
754         adjusted[i] = map[partIdx[i]];
755       }
756       ierr = PetscFree(map);CHKERRQ(ierr);
757       ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
758       ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
759       ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
760       ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
761       ierr = ISDestroy(partition);CHKERRQ(ierr);
762       *partition = newPartition;
763     }
764   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
765   ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
766   PetscFunctionReturn(0);
767 }
768 
769 static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
770 {
771   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
772   PetscErrorCode          ierr;
773 
774   PetscFunctionBegin;
775   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
776   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
777   ierr = PetscFree(p);CHKERRQ(ierr);
778   PetscFunctionReturn(0);
779 }
780 
781 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
782 {
783   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
784   PetscErrorCode          ierr;
785 
786   PetscFunctionBegin;
787   if (p->random) {
788     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
789     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
790     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
791   }
792   PetscFunctionReturn(0);
793 }
794 
795 static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
796 {
797   PetscBool      iascii;
798   PetscErrorCode ierr;
799 
800   PetscFunctionBegin;
801   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
802   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
803   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
804   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
805   PetscFunctionReturn(0);
806 }
807 
808 static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
809 {
810   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
811   PetscErrorCode          ierr;
812 
813   PetscFunctionBegin;
814   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner Shell Options");CHKERRQ(ierr);
815   ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
816   ierr = PetscOptionsTail();CHKERRQ(ierr);
817   PetscFunctionReturn(0);
818 }
819 
820 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
821 {
822   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
823   PetscInt                np;
824   PetscErrorCode          ierr;
825 
826   PetscFunctionBegin;
827   if (p->random) {
828     PetscRandom r;
829     PetscInt   *sizes, *points, v, p;
830     PetscMPIInt rank;
831 
832     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
833     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
834     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr);
835     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
836     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
837     for (v = 0; v < numVertices; ++v) {points[v] = v;}
838     for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);}
839     for (v = numVertices-1; v > 0; --v) {
840       PetscReal val;
841       PetscInt  w, tmp;
842 
843       ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));CHKERRQ(ierr);
844       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
845       w    = PetscFloorReal(val);
846       tmp       = points[v];
847       points[v] = points[w];
848       points[w] = tmp;
849     }
850     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
851     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
852     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
853   }
854   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
855   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
856   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
857   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
858   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
859   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
860   *partition = p->partition;
861   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
862   PetscFunctionReturn(0);
863 }
864 
865 static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
866 {
867   PetscFunctionBegin;
868   part->ops->view           = PetscPartitionerView_Shell;
869   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
870   part->ops->destroy        = PetscPartitionerDestroy_Shell;
871   part->ops->partition      = PetscPartitionerPartition_Shell;
872   PetscFunctionReturn(0);
873 }
874 
875 /*MC
876   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
877 
878   Level: intermediate
879 
880 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
881 M*/
882 
883 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
884 {
885   PetscPartitioner_Shell *p;
886   PetscErrorCode          ierr;
887 
888   PetscFunctionBegin;
889   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
890   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
891   part->data = p;
892 
893   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
894   p->random = PETSC_FALSE;
895   PetscFunctionReturn(0);
896 }
897 
898 /*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
900 
901   Collective on Part
902 
903   Input Parameters:
904 + part     - The PetscPartitioner
905 . size - The number of partitions
906 . sizes    - array of size size (or NULL) providing the number of points in each partition
907 - points   - array of size sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)
908 
909   Level: developer
910 
911   Notes:
912 
913     It is safe to free the sizes and points arrays after use in this routine.
914 
915 .seealso DMPlexDistribute(), PetscPartitionerCreate()
916 @*/
917 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
918 {
919   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
920   PetscInt                proc, numPoints;
921   PetscErrorCode          ierr;
922 
923   PetscFunctionBegin;
924   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
925   if (sizes)  {PetscValidPointer(sizes, 3);}
926   if (points) {PetscValidPointer(points, 4);}
927   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
928   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
929   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
930   ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
931   if (sizes) {
932     for (proc = 0; proc < size; ++proc) {
933       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
934     }
935   }
936   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
937   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
938   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
939   PetscFunctionReturn(0);
940 }
941 
942 /*@
943   PetscPartitionerShellSetRandom - Set the flag to use a random partition
944 
945   Collective on Part
946 
947   Input Parameters:
948 + part   - The PetscPartitioner
949 - random - The flag to use a random partition
950 
951   Level: intermediate
952 
953 .seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
954 @*/
955 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
956 {
957   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
958 
959   PetscFunctionBegin;
960   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
961   p->random = random;
962   PetscFunctionReturn(0);
963 }
964 
965 /*@
966   PetscPartitionerShellGetRandom - get the flag to use a random partition
967 
968   Collective on Part
969 
970   Input Parameter:
971 . part   - The PetscPartitioner
972 
  Output Parameter:
974 . random - The flag to use a random partition
975 
976   Level: intermediate
977 
978 .seealso PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
979 @*/
980 PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
981 {
982   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
983 
984   PetscFunctionBegin;
985   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
986   PetscValidPointer(random, 2);
987   *random = p->random;
988   PetscFunctionReturn(0);
989 }
990 
991 static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
992 {
993   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
994   PetscErrorCode          ierr;
995 
996   PetscFunctionBegin;
997   ierr = PetscFree(p);CHKERRQ(ierr);
998   PetscFunctionReturn(0);
999 }
1000 
static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  /* The simple partitioner has no parameters or state to display */
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1006 
1007 static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
1008 {
1009   PetscBool      iascii;
1010   PetscErrorCode ierr;
1011 
1012   PetscFunctionBegin;
1013   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1014   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1015   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1016   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
1017   PetscFunctionReturn(0);
1018 }
1019 
static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)dm);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  /* The point permutation is the identity: vertices keep their local order */
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial: split the local vertices evenly; the first (numVertices % nparts) parts get one extra */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  }
  else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Gather per-rank vertex counts and turn them into a prefix sum, so that
       offsets[r] is the global index of rank r's first vertex */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    /* Global index range [myFirst, myLast] owned by this rank */
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      /* Determine which block-distributed partitions overlap this rank's range.
         Partitions 0..rem-1 have pBig vertices, the rest have pSmall. */
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      PetscInt rem = nvGlobal % nparts;
      PetscInt pSmall = nvGlobal/nparts;
      PetscInt pBig = nvGlobal/nparts + 1;


      if (rem) {
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        }
        else {
          /* Past the large partitions: offset into the small ones */
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        }
        else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      }
      else {
        /* Even division: every partition has nvGlobal/nparts vertices */
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      /* Assign this rank's share of each overlapping partition: clip the
         partition's global range to [myFirst, myLast] */
      for (np = firstPart; np <= lastPart; np++) {
        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1093 
1094 static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1095 {
1096   PetscFunctionBegin;
1097   part->ops->view      = PetscPartitionerView_Simple;
1098   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1099   part->ops->partition = PetscPartitionerPartition_Simple;
1100   PetscFunctionReturn(0);
1101 }
1102 
1103 /*MC
1104   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1105 
1106   Level: intermediate
1107 
1108 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1109 M*/
1110 
1111 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1112 {
1113   PetscPartitioner_Simple *p;
1114   PetscErrorCode           ierr;
1115 
1116   PetscFunctionBegin;
1117   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1118   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1119   part->data = p;
1120 
1121   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1122   PetscFunctionReturn(0);
1123 }
1124 
1125 static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1126 {
1127   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1128   PetscErrorCode          ierr;
1129 
1130   PetscFunctionBegin;
1131   ierr = PetscFree(p);CHKERRQ(ierr);
1132   PetscFunctionReturn(0);
1133 }
1134 
static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  /* The gather partitioner has no parameters or state to display */
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1140 
1141 static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1142 {
1143   PetscBool      iascii;
1144   PetscErrorCode ierr;
1145 
1146   PetscFunctionBegin;
1147   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1148   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1149   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1150   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1151   PetscFunctionReturn(0);
1152 }
1153 
1154 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1155 {
1156   PetscInt       np;
1157   PetscErrorCode ierr;
1158 
1159   PetscFunctionBegin;
1160   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1161   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1162   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1163   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1164   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1165   PetscFunctionReturn(0);
1166 }
1167 
1168 static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1169 {
1170   PetscFunctionBegin;
1171   part->ops->view      = PetscPartitionerView_Gather;
1172   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1173   part->ops->partition = PetscPartitionerPartition_Gather;
1174   PetscFunctionReturn(0);
1175 }
1176 
1177 /*MC
1178   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1179 
1180   Level: intermediate
1181 
1182 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1183 M*/
1184 
1185 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1186 {
1187   PetscPartitioner_Gather *p;
1188   PetscErrorCode           ierr;
1189 
1190   PetscFunctionBegin;
1191   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1192   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1193   part->data = p;
1194 
1195   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1196   PetscFunctionReturn(0);
1197 }
1198 
1199 
1200 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1201 {
1202   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1203   PetscErrorCode          ierr;
1204 
1205   PetscFunctionBegin;
1206   ierr = PetscFree(p);CHKERRQ(ierr);
1207   PetscFunctionReturn(0);
1208 }
1209 
static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  /* The Chaco partitioner exposes no tunable state to display */
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1215 
1216 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1217 {
1218   PetscBool      iascii;
1219   PetscErrorCode ierr;
1220 
1221   PetscFunctionBegin;
1222   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1223   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1224   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1225   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1226   PetscFunctionReturn(0);
1227 }
1228 
1229 #if defined(PETSC_HAVE_CHACO)
1230 #if defined(PETSC_HAVE_UNISTD_H)
1231 #include <unistd.h>
1232 #endif
1233 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1234 #include <chaco.h>
1235 #else
1236 /* Older versions of Chaco do not have an include file */
1237 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1238                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1239                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1240                        int mesh_dims[3], double *goal, int global_method, int local_method,
1241                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1242 #endif
1243 extern int FREE_GRAPH;
1244 #endif
1245 
static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int           *assignment;              /* Output partition */
#else
  short int     *assignment;              /* Output partition */
#endif
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
#if defined (PETSC_USE_DEBUG)
  {
    /* Chaco is serial: error out if more than one rank holds vertices */
    int ival,isum;
    PetscBool distributed;

    ival = (numVertices > 0);
    ierr = MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);CHKERRQ(ierr);
    distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
    if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
  }
#endif
  if (!numVertices) {
    /* Empty local graph: produce an empty section and IS and return early */
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco expects 1-based vertex indices; shift in place and undo at the end */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    /* Restore stdout and capture Chaco's output so it can be reported on error */
    char msgLog[10000];
    int  count;

    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#else
  if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* Group vertices by partition: partition 0's vertices first, then 1's, ... */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift applied above, restoring the caller's adjacency */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1366 
1367 static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1368 {
1369   PetscFunctionBegin;
1370   part->ops->view      = PetscPartitionerView_Chaco;
1371   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1372   part->ops->partition = PetscPartitionerPartition_Chaco;
1373   PetscFunctionReturn(0);
1374 }
1375 
1376 /*MC
1377   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1378 
1379   Level: intermediate
1380 
1381 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1382 M*/
1383 
1384 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1385 {
1386   PetscPartitioner_Chaco *p;
1387   PetscErrorCode          ierr;
1388 
1389   PetscFunctionBegin;
1390   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1391   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1392   part->data = p;
1393 
1394   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1395   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1396   PetscFunctionReturn(0);
1397 }
1398 
1399 static const char *ptypes[] = {"kway", "rb"};
1400 
1401 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1402 {
1403   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1404   PetscErrorCode             ierr;
1405 
1406   PetscFunctionBegin;
1407   ierr = PetscFree(p);CHKERRQ(ierr);
1408   PetscFunctionReturn(0);
1409 }
1410 
1411 static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1412 {
1413   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1414   PetscErrorCode             ierr;
1415 
1416   PetscFunctionBegin;
1417   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1418   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis type: %s\n", ptypes[p->ptype]);CHKERRQ(ierr);
1419   ierr = PetscViewerASCIIPrintf(viewer, "load imbalance ratio %g\n", (double) p->imbalanceRatio);CHKERRQ(ierr);
1420   ierr = PetscViewerASCIIPrintf(viewer, "debug flag %D\n", p->debugFlag);CHKERRQ(ierr);
1421   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1422   PetscFunctionReturn(0);
1423 }
1424 
1425 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1426 {
1427   PetscBool      iascii;
1428   PetscErrorCode ierr;
1429 
1430   PetscFunctionBegin;
1431   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1432   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1433   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1434   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1435   PetscFunctionReturn(0);
1436 }
1437 
1438 static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1439 {
1440   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1441   PetscErrorCode            ierr;
1442 
1443   PetscFunctionBegin;
1444   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");CHKERRQ(ierr);
1445   ierr = PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);CHKERRQ(ierr);
1446   ierr = PetscOptionsReal("-petscpartitioner_parmetis_imbalance_ratio", "Load imbalance ratio limit", "", p->imbalanceRatio, &p->imbalanceRatio, NULL);CHKERRQ(ierr);
1447   ierr = PetscOptionsInt("-petscpartitioner_parmetis_debug", "Debugging flag", "", p->debugFlag, &p->debugFlag, NULL);CHKERRQ(ierr);
1448   ierr = PetscOptionsTail();CHKERRQ(ierr);
1449   PetscFunctionReturn(0);
1450 }
1451 
1452 #if defined(PETSC_HAVE_PARMETIS)
1453 #include <parmetis.h>
1454 #endif
1455 
1456 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1457 {
1458 #if defined(PETSC_HAVE_PARMETIS)
1459   PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *) part->data;
1460   MPI_Comm       comm;
1461   PetscSection   section;
1462   PetscInt       nvtxs       = numVertices; /* The number of vertices in full graph */
1463   PetscInt      *vtxdist;                   /* Distribution of vertices across processes */
1464   PetscInt      *xadj        = start;       /* Start of edge list for each vertex */
1465   PetscInt      *adjncy      = adjacency;   /* Edge lists for all vertices */
1466   PetscInt      *vwgt        = NULL;        /* Vertex weights */
1467   PetscInt      *adjwgt      = NULL;        /* Edge weights */
1468   PetscInt       wgtflag     = 0;           /* Indicates which weights are present */
1469   PetscInt       numflag     = 0;           /* Indicates initial offset (0 or 1) */
1470   PetscInt       ncon        = 1;           /* The number of weights per vertex */
1471   PetscInt       metis_ptype = pm->ptype;   /* kway or recursive bisection */
1472   real_t        *tpwgts;                    /* The fraction of vertex weights assigned to each partition */
1473   real_t        *ubvec;                     /* The balance intolerance for vertex weights */
1474   PetscInt       options[64];               /* Options */
1475   /* Outputs */
1476   PetscInt       v, i, *assignment, *points;
1477   PetscMPIInt    size, rank, p;
1478   PetscErrorCode ierr;
1479 
1480   PetscFunctionBegin;
1481   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1482   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1483   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1484   /* Calculate vertex distribution */
1485   ierr = PetscMalloc5(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
1486   vtxdist[0] = 0;
1487   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1488   for (p = 2; p <= size; ++p) {
1489     vtxdist[p] += vtxdist[p-1];
1490   }
1491   /* Calculate partition weights */
1492   for (p = 0; p < nparts; ++p) {
1493     tpwgts[p] = 1.0/nparts;
1494   }
1495   ubvec[0] = pm->imbalanceRatio;
1496   /* Weight cells by dofs on cell by default */
1497   ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
1498   if (section) {
1499     PetscInt cStart, cEnd, dof;
1500 
1501     ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
1502     for (v = cStart; v < cEnd; ++v) {
1503       ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1504       /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1505       /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1506       if (v-cStart < numVertices) vwgt[v-cStart] = PetscMax(dof, 1);
1507     }
1508   } else {
1509     for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1510   }
1511   wgtflag |= 2; /* have weights on graph vertices */
1512 
1513   if (nparts == 1) {
1514     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1515   } else {
1516     for (p = 0; !vtxdist[p+1] && p < size; ++p);
1517     if (vtxdist[p+1] == vtxdist[size]) {
1518       if (rank == p) {
1519         ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
1520         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
1521         if (metis_ptype == 1) {
1522           PetscStackPush("METIS_PartGraphRecursive");
1523           ierr = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1524           PetscStackPop;
1525           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
1526         } else {
1527           /*
1528            It would be nice to activate the two options below, but they would need some actual testing.
1529            - Turning on these options may exercise path of the METIS code that have bugs and may break production runs.
1530            - If CONTIG is set to 1, METIS will exit with error if the graph is disconnected, despite the manual saying the option is ignored in such case.
1531           */
1532           /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
1533           /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
1534           PetscStackPush("METIS_PartGraphKway");
1535           ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1536           PetscStackPop;
1537           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1538         }
1539       }
1540     } else {
1541       options[0] = 1;
1542       options[1] = pm->debugFlag;
1543       PetscStackPush("ParMETIS_V3_PartKway");
1544       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &comm);
1545       PetscStackPop;
1546       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
1547     }
1548   }
1549   /* Convert to PetscSection+IS */
1550   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1551   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1552   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1553   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1554   for (p = 0, i = 0; p < nparts; ++p) {
1555     for (v = 0; v < nvtxs; ++v) {
1556       if (assignment[v] == p) points[i++] = v;
1557     }
1558   }
1559   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1560   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1561   ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
1562   PetscFunctionReturn(0);
1563 #else
1564   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1565 #endif
1566 }
1567 
1568 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1569 {
1570   PetscFunctionBegin;
1571   part->ops->view           = PetscPartitionerView_ParMetis;
1572   part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
1573   part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
1574   part->ops->partition      = PetscPartitionerPartition_ParMetis;
1575   PetscFunctionReturn(0);
1576 }
1577 
1578 /*MC
1579   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1580 
1581   Level: intermediate
1582 
1583 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1584 M*/
1585 
1586 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1587 {
1588   PetscPartitioner_ParMetis *p;
1589   PetscErrorCode          ierr;
1590 
1591   PetscFunctionBegin;
1592   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1593   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1594   part->data = p;
1595 
1596   p->ptype          = 0;
1597   p->imbalanceRatio = 1.05;
1598   p->debugFlag      = 0;
1599 
1600   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1601   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1602   PetscFunctionReturn(0);
1603 }
1604 
1605 
/* Citation registration flag and BibTeX entry, registered once when the
   PT-Scotch partitioner is created (see PetscCitationsRegister) */
PetscBool PTScotchPartitionercite = PETSC_FALSE;
const char PTScotchPartitionerCitation[] =
  "@article{PTSCOTCH,\n"
  "  author  = {C. Chevalier and F. Pellegrini},\n"
  "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
  "  journal = {Parallel Computing},\n"
  "  volume  = {34},\n"
  "  number  = {6},\n"
  "  pages   = {318--331},\n"
  "  year    = {2008},\n"
  "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
  "}\n";
1618 
/* Implementation context for the PT-Scotch partitioner */
typedef struct {
  PetscInt  strategy;   /* index into PTScotchStrategyList (0 = DEFAULT) */
  PetscReal imbalance;  /* load imbalance ratio passed to PT-Scotch */
} PetscPartitioner_PTScotch;

/* Human-readable names for the supported strategies; the order must match
   the index mapping in PTScotch_Strategy() below */
static const char *const
PTScotchStrategyList[] = {
  "DEFAULT",
  "QUALITY",
  "SPEED",
  "BALANCE",
  "SAFETY",
  "SCALABILITY",
  "RECURSIVE",
  "REMAP"
};
1635 
#if defined(PETSC_HAVE_PTSCOTCH)

EXTERN_C_BEGIN
#include <ptscotch.h>
EXTERN_C_END

/* Convert a nonzero PT-Scotch return code into a PETSc library error */
#define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0)
1643 
1644 static int PTScotch_Strategy(PetscInt strategy)
1645 {
1646   switch (strategy) {
1647   case  0: return SCOTCH_STRATDEFAULT;
1648   case  1: return SCOTCH_STRATQUALITY;
1649   case  2: return SCOTCH_STRATSPEED;
1650   case  3: return SCOTCH_STRATBALANCE;
1651   case  4: return SCOTCH_STRATSAFETY;
1652   case  5: return SCOTCH_STRATSCALABILITY;
1653   case  6: return SCOTCH_STRATRECURSIVE;
1654   case  7: return SCOTCH_STRATREMAP;
1655   default: return SCOTCH_STRATDEFAULT;
1656   }
1657 }
1658 
1659 
/* Partition a graph given in CSR form (xadj, adjncy) into nparts pieces using the
   sequential Scotch interface; the partition number of each vertex is written into
   part[]. vtxwgt and adjwgt are optional weight arrays (may be NULL). */
static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
                                             SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[])
{
  SCOTCH_Graph   grafdat;
  SCOTCH_Strat   stradat;
  SCOTCH_Num     vertnbr = n;        /* number of vertices */
  SCOTCH_Num     edgenbr = xadj[n];  /* number of arcs: the final CSR offset */
  SCOTCH_Num*    velotab = vtxwgt;   /* vertex load array; may be disabled below */
  SCOTCH_Num*    edlotab = adjwgt;   /* edge load array */
  SCOTCH_Num     flagval = strategy;
  double         kbalval = imbalance;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  {
    /* Allow disabling vertex weights from the options database.
       NOTE(review): the option name contains a typo ("partititoner"); changing it
       would alter the user-visible interface, so it is only flagged here. */
    PetscBool flg = PETSC_TRUE;
    ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
    if (!flg) velotab = NULL;
  }
  ierr = SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
#if defined(PETSC_USE_DEBUG)
  /* Validate graph consistency in debug builds only */
  ierr = SCOTCH_graphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
#endif
  ierr = SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
  SCOTCH_stratExit(&stradat);
  SCOTCH_graphExit(&grafdat);
  PetscFunctionReturn(0);
}
1691 
1692 static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1693                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[], MPI_Comm comm)
1694 {
1695   PetscMPIInt     procglbnbr;
1696   PetscMPIInt     proclocnum;
1697   SCOTCH_Arch     archdat;
1698   SCOTCH_Dgraph   grafdat;
1699   SCOTCH_Dmapping mappdat;
1700   SCOTCH_Strat    stradat;
1701   SCOTCH_Num      vertlocnbr;
1702   SCOTCH_Num      edgelocnbr;
1703   SCOTCH_Num*     veloloctab = vtxwgt;
1704   SCOTCH_Num*     edloloctab = adjwgt;
1705   SCOTCH_Num      flagval = strategy;
1706   double          kbalval = imbalance;
1707   PetscErrorCode  ierr;
1708 
1709   PetscFunctionBegin;
1710   {
1711     PetscBool flg = PETSC_TRUE;
1712     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
1713     if (!flg) veloloctab = NULL;
1714   }
1715   ierr = MPI_Comm_size(comm, &procglbnbr);CHKERRQ(ierr);
1716   ierr = MPI_Comm_rank(comm, &proclocnum);CHKERRQ(ierr);
1717   vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
1718   edgelocnbr = xadj[vertlocnbr];
1719 
1720   ierr = SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr);
1721   ierr = SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr);
1722 #if defined(PETSC_USE_DEBUG)
1723   ierr = SCOTCH_dgraphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1724 #endif
1725   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1726   ierr = SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRQ(ierr);
1727   ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
1728   ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
1729   ierr = SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr);
1730   ierr = SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr);
1731   SCOTCH_dgraphMapExit(&grafdat, &mappdat);
1732   SCOTCH_archExit(&archdat);
1733   SCOTCH_stratExit(&stradat);
1734   SCOTCH_dgraphExit(&grafdat);
1735   PetscFunctionReturn(0);
1736 }
1737 
1738 #endif /* PETSC_HAVE_PTSCOTCH */
1739 
1740 static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
1741 {
1742   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1743   PetscErrorCode             ierr;
1744 
1745   PetscFunctionBegin;
1746   ierr = PetscFree(p);CHKERRQ(ierr);
1747   PetscFunctionReturn(0);
1748 }
1749 
1750 static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer)
1751 {
1752   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1753   PetscErrorCode            ierr;
1754 
1755   PetscFunctionBegin;
1756   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1757   ierr = PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);CHKERRQ(ierr);
1758   ierr = PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n",(double)p->imbalance);CHKERRQ(ierr);
1759   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1760   PetscFunctionReturn(0);
1761 }
1762 
1763 static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
1764 {
1765   PetscBool      iascii;
1766   PetscErrorCode ierr;
1767 
1768   PetscFunctionBegin;
1769   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1770   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1771   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1772   if (iascii) {ierr = PetscPartitionerView_PTScotch_Ascii(part, viewer);CHKERRQ(ierr);}
1773   PetscFunctionReturn(0);
1774 }
1775 
1776 static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1777 {
1778   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1779   const char *const         *slist = PTScotchStrategyList;
1780   PetscInt                  nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0]));
1781   PetscBool                 flag;
1782   PetscErrorCode            ierr;
1783 
1784   PetscFunctionBegin;
1785   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");CHKERRQ(ierr);
1786   ierr = PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);CHKERRQ(ierr);
1787   ierr = PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);CHKERRQ(ierr);
1788   ierr = PetscOptionsTail();CHKERRQ(ierr);
1789   PetscFunctionReturn(0);
1790 }
1791 
1792 static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1793 {
1794 #if defined(PETSC_HAVE_PTSCOTCH)
1795   MPI_Comm       comm       = PetscObjectComm((PetscObject)part);
1796   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1797   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1798   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1799   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1800   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1801   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1802   PetscInt       v, i, *assignment, *points;
1803   PetscMPIInt    size, rank, p;
1804   PetscErrorCode ierr;
1805 
1806   PetscFunctionBegin;
1807   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1808   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1809   ierr = PetscMalloc2(nparts+1,&vtxdist,PetscMax(nvtxs,1),&assignment);CHKERRQ(ierr);
1810 
1811   /* Calculate vertex distribution */
1812   vtxdist[0] = 0;
1813   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1814   for (p = 2; p <= size; ++p) {
1815     vtxdist[p] += vtxdist[p-1];
1816   }
1817 
1818   if (nparts == 1) {
1819     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1820   } else {
1821     PetscSection section;
1822     /* Weight cells by dofs on cell by default */
1823     ierr = PetscMalloc1(PetscMax(nvtxs,1),&vwgt);CHKERRQ(ierr);
1824     ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
1825     if (section) {
1826       PetscInt vStart, vEnd, dof;
1827       ierr = DMPlexGetHeightStratum(dm, 0, &vStart, &vEnd);CHKERRQ(ierr);
1828       for (v = vStart; v < vEnd; ++v) {
1829         ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1830         /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1831         /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1832         if (v-vStart < numVertices) vwgt[v-vStart] = PetscMax(dof, 1);
1833       }
1834     } else {
1835       for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1836     }
1837     {
1838       PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
1839       int                       strat = PTScotch_Strategy(pts->strategy);
1840       double                    imbal = (double)pts->imbalance;
1841 
1842       for (p = 0; !vtxdist[p+1] && p < size; ++p);
1843       if (vtxdist[p+1] == vtxdist[size]) {
1844         if (rank == p) {
1845           ierr = PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, assignment);CHKERRQ(ierr);
1846         }
1847       } else {
1848         ierr = PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, assignment, comm);CHKERRQ(ierr);
1849       }
1850     }
1851     ierr = PetscFree(vwgt);CHKERRQ(ierr);
1852   }
1853 
1854   /* Convert to PetscSection+IS */
1855   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1856   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1857   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1858   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1859   for (p = 0, i = 0; p < nparts; ++p) {
1860     for (v = 0; v < nvtxs; ++v) {
1861       if (assignment[v] == p) points[i++] = v;
1862     }
1863   }
1864   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1865   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1866 
1867   ierr = PetscFree2(vtxdist,assignment);CHKERRQ(ierr);
1868   PetscFunctionReturn(0);
1869 #else
1870   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
1871 #endif
1872 }
1873 
1874 static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
1875 {
1876   PetscFunctionBegin;
1877   part->ops->view           = PetscPartitionerView_PTScotch;
1878   part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
1879   part->ops->partition      = PetscPartitionerPartition_PTScotch;
1880   part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
1881   PetscFunctionReturn(0);
1882 }
1883 
1884 /*MC
1885   PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library
1886 
1887   Level: intermediate
1888 
1889 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1890 M*/
1891 
1892 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
1893 {
1894   PetscPartitioner_PTScotch *p;
1895   PetscErrorCode          ierr;
1896 
1897   PetscFunctionBegin;
1898   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1899   ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
1900   part->data = p;
1901 
1902   p->strategy  = 0;
1903   p->imbalance = 0.01;
1904 
1905   ierr = PetscPartitionerInitialize_PTScotch(part);CHKERRQ(ierr);
1906   ierr = PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);CHKERRQ(ierr);
1907   PetscFunctionReturn(0);
1908 }
1909 
1910 
1911 /*@
1912   DMPlexGetPartitioner - Get the mesh partitioner
1913 
1914   Not collective
1915 
1916   Input Parameter:
1917 . dm - The DM
1918 
1919   Output Parameter:
1920 . part - The PetscPartitioner
1921 
1922   Level: developer
1923 
1924   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1925 
.seealso: DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1927 @*/
1928 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1929 {
1930   DM_Plex       *mesh = (DM_Plex *) dm->data;
1931 
1932   PetscFunctionBegin;
1933   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1934   PetscValidPointer(part, 2);
1935   *part = mesh->partitioner;
1936   PetscFunctionReturn(0);
1937 }
1938 
1939 /*@
1940   DMPlexSetPartitioner - Set the mesh partitioner
1941 
1942   logically collective on dm and part
1943 
1944   Input Parameters:
1945 + dm - The DM
1946 - part - The partitioner
1947 
1948   Level: developer
1949 
1950   Note: Any existing PetscPartitioner will be destroyed.
1951 
.seealso: DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1953 @*/
1954 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1955 {
1956   DM_Plex       *mesh = (DM_Plex *) dm->data;
1957   PetscErrorCode ierr;
1958 
1959   PetscFunctionBegin;
1960   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1961   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1962   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1963   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1964   mesh->partitioner = part;
1965   PetscFunctionReturn(0);
1966 }
1967 
/* Add to the hash set ht the tree (hanging-node) relatives of point: when up is set,
   the closure of the point's tree parent, recursing upward; when down is set, the
   point's tree children, recursing downward. Used to complete partition closures on
   meshes with parent/child (tree) structure. */
static PetscErrorCode DMPlexAddClosure_Tree(DM dm, PetscHSetI ht, PetscInt point, PetscBool up, PetscBool down)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (up) {
    PetscInt parent;

    ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
    /* parent == point terminates the upward recursion (presumably how
       DMPlexGetTreeParent signals "no parent" -- TODO confirm) */
    if (parent != point) {
      PetscInt closureSize, *closure = NULL, i;

      ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
      for (i = 0; i < closureSize; i++) {
        PetscInt cpoint = closure[2*i]; /* closure stores (point, orientation) pairs */

        ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
        /* keep climbing the tree, but do not descend from ancestors */
        ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
      }
      ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
    }
  }
  if (down) {
    PetscInt numChildren;
    const PetscInt *children;

    ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
    if (numChildren) {
      PetscInt i;

      for (i = 0; i < numChildren; i++) {
        PetscInt cpoint = children[i];

        ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
        /* descend recursively; do not climb back up from descendants */
        ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}
2008 
2009 /*@
2010   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
2011 
2012   Input Parameters:
2013 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2015 
2016   Level: developer
2017 
2018 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2019 @*/
PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, c, closureSize;
  PetscInt       *closure = NULL;
  DM_Plex        *mesh    = (DM_Plex *)dm->data;
  /* Tree (hanging-node) meshes need parent/child relatives included as well */
  PetscBool       hasTree = (mesh->parentSection || mesh->childSection) ? PETSC_TRUE : PETSC_FALSE;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);

  /* For each rank stratum, replace the point set by the union of the transitive
     closures of its points */
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];
    PetscHSetI     ht;
    PetscInt       nelems, *elems, off = 0;

    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    /* Hash set deduplicates closure points; 16 is a heuristic per-point closure
       size used to presize the table and limit rehashing */
    ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
    ierr = PetscHSetIResize(ht, numPoints*16);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      /* closure[] holds (point, orientation) pairs, hence the stride of 2 */
      for (c = 0; c < closureSize*2; c += 2) {
        ierr = PetscHSetIAdd(ht, closure[c]);CHKERRQ(ierr);
        if (hasTree) {ierr = DMPlexAddClosure_Tree(dm, ht, closure[c], PETSC_TRUE, PETSC_TRUE);CHKERRQ(ierr);}
      }
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
    /* Flatten the hash set into a sorted IS and overwrite the stratum with it */
    ierr = PetscHSetIGetSize(ht, &nelems);CHKERRQ(ierr);
    ierr = PetscMalloc1(nelems, &elems);CHKERRQ(ierr);
    ierr = PetscHSetIGetElems(ht, &off, elems);CHKERRQ(ierr);
    ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
    ierr = PetscSortInt(nelems, elems);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PETSC_COMM_SELF, nelems, elems, PETSC_OWN_POINTER, &pointIS);CHKERRQ(ierr);
    ierr = DMLabelSetStratumIS(label, rank, pointIS);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }

  /* A single restore releases the closure work array acquired in the loops above */
  if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure);CHKERRQ(ierr);}
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2069 
2070 /*@
2071   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
2072 
2073   Input Parameters:
2074 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2076 
2077   Level: developer
2078 
2079 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2080 @*/
PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, a, adjSize;
  PetscInt       *adj = NULL; /* adjacency work array, grown and reused across calls */
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  /* For every labeled point, label each of its adjacent points with the same rank */
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];

    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* PETSC_DETERMINE lets DMPlexGetAdjacency size the adj array for us */
      adjSize = PETSC_DETERMINE;
      ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
      for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  ierr = PetscFree(adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2112 
2113 /*@
2114   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
2115 
2116   Input Parameters:
2117 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2119 
2120   Level: developer
2121 
2122   Note: This is required when generating multi-level overlaps to capture
2123   overlap points from non-neighbouring partitions.
2124 
2125 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2126 @*/
PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
{
  MPI_Comm        comm;
  PetscMPIInt     rank;
  PetscSF         sfPoint;
  DMLabel         lblRoots, lblLeaves;
  IS              rankIS, pointIS;
  const PetscInt *ranks;
  PetscInt        numRanks, r;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* Pull point contributions from remote leaves into local roots */
  ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt remoteRank = ranks[r];
    /* Entries destined for this rank itself need no re-insertion */
    if (remoteRank == rank) continue;
    ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
    ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
  /* Push point contributions from roots into remote leaves */
  ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt remoteRank = ranks[r];
    /* As above, skip values already belonging to this rank */
    if (remoteRank == rank) continue;
    ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
    ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2174 
2175 /*@
2176   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
2177 
2178   Input Parameters:
2179 + dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots
2185 
2186   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
2187   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
2188 
2189   Level: developer
2190 
2191 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2192 @*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           p, n, numNeighbors, ssize, l, nleaves;
  PetscSF            sfPoint;
  PetscSFNode       *rootPoints, *leafPoints;
  PetscSection       rootSection, leafSection;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners */
  /* rootSection counts, per destination rank, how many points this process sends */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &ssize);CHKERRQ(ierr);
  ierr = PetscMalloc1(ssize, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* If the point is a leaf in the point SF, record its true remote owner;
         otherwise this process owns it */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Filter remote contributions (ovLeafPoints) into the overlapSF */
  ierr = PetscSectionGetStorageSize(leafSection, &ssize);CHKERRQ(ierr);
  for (p = 0; p < ssize; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2262 
/*@
  DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points

  Input Parameters:
+ dm    - The DM
- label - DMLabel assigning ranks to remote roots

  Output Parameter:
. sf    - The star forest communication context encapsulating the defined mapping

  Note: The incoming label is a receiver mapping of remote points to their parent rank.

  Level: developer

.seealso: DMPlexDistribute(), DMPlexCreateOverlap()
@*/
2279 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
2280 {
2281   PetscMPIInt     rank, size;
2282   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
2283   PetscSFNode    *remotePoints;
2284   IS              remoteRootIS;
2285   const PetscInt *remoteRoots;
2286   PetscErrorCode ierr;
2287 
2288   PetscFunctionBegin;
2289   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
2290   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
2291 
2292   for (numRemote = 0, n = 0; n < size; ++n) {
2293     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
2294     numRemote += numPoints;
2295   }
2296   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
2297   /* Put owned points first */
2298   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
2299   if (numPoints > 0) {
2300     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
2301     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2302     for (p = 0; p < numPoints; p++) {
2303       remotePoints[idx].index = remoteRoots[p];
2304       remotePoints[idx].rank = rank;
2305       idx++;
2306     }
2307     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2308     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
2309   }
2310   /* Now add remote points */
2311   for (n = 0; n < size; ++n) {
2312     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
2313     if (numPoints <= 0 || n == rank) continue;
2314     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
2315     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2316     for (p = 0; p < numPoints; p++) {
2317       remotePoints[idx].index = remoteRoots[p];
2318       remotePoints[idx].rank = n;
2319       idx++;
2320     }
2321     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2322     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
2323   }
2324   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
2325   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
2326   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
2327   PetscFunctionReturn(0);
2328 }
2329