xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 0358368a02ccd7dd1fafd38f7891a448f6b4501e)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/hashseti.h>
3 
/* PETSc class id for PetscPartitioner objects; 0 until registered (presumably during DM package initialization — see DMInitializePackage()) */
PetscClassId PETSCPARTITIONER_CLASSID = 0;

/* Registry mapping type names to constructor functions, populated by PetscPartitionerRegister() */
PetscFunctionList PetscPartitionerList              = NULL;
/* Guard so the built-in implementations are registered only once by PetscPartitionerRegisterAll() */
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
9 PetscBool ChacoPartitionercite = PETSC_FALSE;
10 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
11                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
12                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
13                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
14                                "  isbn      = {0-89791-816-9},\n"
15                                "  pages     = {28},\n"
16                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
17                                "  publisher = {ACM Press},\n"
18                                "  address   = {New York},\n"
19                                "  year      = {1995}\n}\n";
20 
/* Citation registration flag and BibTeX entry for the ParMetis partitioner (presumably registered via PetscCitationsRegister() when the implementation is used — confirm at call sites) */
PetscBool ParMetisPartitionercite = PETSC_FALSE;
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";
29 
/*@C
  DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner

  Input Parameters:
+ dm      - The mesh DM
- height  - Height of the strata from which to construct the graph

  Output Parameters:
+ numVertices     - Number of vertices in the graph
. offsets         - Point offsets in the graph
. adjacency       - Point connectivity in the graph
- globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.

  Notes:
  The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
  DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: developer

.seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
@*/
PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
{
  PetscInt       dim, depth, p, pStart, pEnd, a, adjSize, idx, size;
  PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
  IS             cellNumbering;
  const PetscInt *cellNum;
  PetscBool      useCone, useClosure;
  PetscSection   section;
  PetscSegBuffer adjBuffer;
  PetscSF        sfPoint;
  PetscInt       *adjCells = NULL, *remoteCells = NULL;
  const PetscInt *local;
  PetscInt       nroots, nleaves, l;
  PetscMPIInt    rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  if (dim != depth) {
    /* We do not handle the uninterpolated case here */
    ierr = DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);CHKERRQ(ierr);
    /* DMPlexCreateNeighborCSR does not make a numbering */
    if (globalNumbering) {ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);CHKERRQ(ierr);}
    /* Different behavior for empty graphs */
    /* NOTE(review): numVertices is dereferenced unconditionally here and below — looks like callers must always pass it; confirm */
    if (!*numVertices) {
      ierr = PetscMalloc1(1, offsets);CHKERRQ(ierr);
      (*offsets)[0] = 0;
    }
    /* Broken in parallel */
    if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported");
    PetscFunctionReturn(0);
  }
  ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* NOTE(review): nroots obtained here is overwritten by the PetscSFGetGraph(dm->sf, ...) call below; this call appears redundant — confirm sfPoint == dm->sf */
  ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
  /* Build adjacency graph via a section/segbuffer: section counts per-point dofs, segbuffer accumulates the adjacency entries */
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
  /* Always use FVM adjacency to create partitioner graph; save the user's settings so they can be restored below */
  ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
  ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);CHKERRQ(ierr);
  if (globalNumbering) {
    /* The caller gets its own reference to the numbering */
    ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
    *globalNumbering = cellNumbering;
  }
  ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  /* For all boundary faces (including faces adjacent to a ghost cell), record the local cell in adjCells
     Broadcast adjCells to remoteCells (to get cells from roots) and Reduce adjCells to remoteCells (to get cells from leaves)
   */
  ierr = PetscSFGetGraph(dm->sf, &nroots, &nleaves, &local, NULL);CHKERRQ(ierr);
  if (nroots >= 0) {
    PetscInt fStart, fEnd, f;

    ierr = PetscCalloc2(nroots, &adjCells, nroots, &remoteCells);CHKERRQ(ierr);
    ierr = DMPlexGetHeightStratum(dm, 1, &fStart, &fEnd);CHKERRQ(ierr);
    /* -3 marks "no cell recorded"; any real cell number will win in the MPI_MAX reduce below */
    for (l = 0; l < nroots; ++l) adjCells[l] = -3;
    for (f = fStart; f < fEnd; ++f) {
      const PetscInt *support;
      PetscInt        supportSize;

      ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
      ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
      /* Boundary face: record its single cell; interior face with a leaf (ghost) cell: record the other cell */
      if (supportSize == 1) adjCells[f] = cellNum[support[0]];
      else if (supportSize == 2) {
        ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = cellNum[support[1]];
        ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = cellNum[support[0]];
      }
    }
    /* -1 marks "no remote cell"; checked when combining adjacencies below */
    for (l = 0; l < nroots; ++l) remoteCells[l] = -1;
    ierr = PetscSFBcastBegin(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
  }
  /* Combine local and global adjacencies */
  for (*numVertices = 0, p = pStart; p < pEnd; p++) {
    const PetscInt *cone;
    PetscInt        coneSize, c;

    /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    /* Add remote cells */
    if (remoteCells) {
      ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
      ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
      for (c = 0; c < coneSize; ++c) {
        if (remoteCells[cone[c]] != -1) {
          PetscInt *PETSC_RESTRICT pBuf;

          ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
          ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
          *pBuf = remoteCells[cone[c]];
        }
      }
    }
    /* Add local cells */
    adjSize = PETSC_DETERMINE;
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      const PetscInt point = adj[a];
      /* Only record distinct cells in the same height stratum */
      if (point != p && pStart <= point && point < pEnd) {
        PetscInt *PETSC_RESTRICT pBuf;
        ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
        ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
        *pBuf = cellNum[point];
      }
    }
    (*numVertices)++;
  }
  ierr = PetscFree2(adjCells, remoteCells);CHKERRQ(ierr);
  /* Restore the user's adjacency settings */
  ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
  /* Derive CSR graph from section/segbuffer */
  ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
  for (idx = 0, p = pStart; p < pEnd; p++) {
    /* Same ownership filter as the counting loop above, so offsets line up with counted vertices */
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
  }
  vOffsets[*numVertices] = size;
  if (offsets) *offsets = vOffsets;
  ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
  ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
  if (adjacency) *adjacency = graph;
  /* Clean up */
  ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
  ierr = PetscFree(adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
191 
192 /*@C
193   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
194 
195   Collective
196 
197   Input Arguments:
198 + dm - The DMPlex
199 - cellHeight - The height of mesh points to treat as cells (default should be 0)
200 
201   Output Arguments:
202 + numVertices - The number of local vertices in the graph, or cells in the mesh.
203 . offsets     - The offset to the adjacency list for each cell
204 - adjacency   - The adjacency list for all cells
205 
206   Note: This is suitable for input to a mesh partitioner like ParMetis.
207 
208   Level: advanced
209 
210 .seealso: DMPlexCreate()
211 @*/
212 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
213 {
214   const PetscInt maxFaceCases = 30;
215   PetscInt       numFaceCases = 0;
216   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
217   PetscInt      *off, *adj;
218   PetscInt      *neighborCells = NULL;
219   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
220   PetscErrorCode ierr;
221 
222   PetscFunctionBegin;
223   /* For parallel partitioning, I think you have to communicate supports */
224   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
225   cellDim = dim - cellHeight;
226   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
227   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
228   if (cEnd - cStart == 0) {
229     if (numVertices) *numVertices = 0;
230     if (offsets)   *offsets   = NULL;
231     if (adjacency) *adjacency = NULL;
232     PetscFunctionReturn(0);
233   }
234   numCells  = cEnd - cStart;
235   faceDepth = depth - cellHeight;
236   if (dim == depth) {
237     PetscInt f, fStart, fEnd;
238 
239     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
240     /* Count neighboring cells */
241     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
242     for (f = fStart; f < fEnd; ++f) {
243       const PetscInt *support;
244       PetscInt        supportSize;
245       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
246       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
247       if (supportSize == 2) {
248         PetscInt numChildren;
249 
250         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
251         if (!numChildren) {
252           ++off[support[0]-cStart+1];
253           ++off[support[1]-cStart+1];
254         }
255       }
256     }
257     /* Prefix sum */
258     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
259     if (adjacency) {
260       PetscInt *tmp;
261 
262       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
263       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
264       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
265       /* Get neighboring cells */
266       for (f = fStart; f < fEnd; ++f) {
267         const PetscInt *support;
268         PetscInt        supportSize;
269         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
270         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
271         if (supportSize == 2) {
272           PetscInt numChildren;
273 
274           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
275           if (!numChildren) {
276             adj[tmp[support[0]-cStart]++] = support[1];
277             adj[tmp[support[1]-cStart]++] = support[0];
278           }
279         }
280       }
281 #if defined(PETSC_USE_DEBUG)
282       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
283 #endif
284       ierr = PetscFree(tmp);CHKERRQ(ierr);
285     }
286     if (numVertices) *numVertices = numCells;
287     if (offsets)   *offsets   = off;
288     if (adjacency) *adjacency = adj;
289     PetscFunctionReturn(0);
290   }
291   /* Setup face recognition */
292   if (faceDepth == 1) {
293     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
294 
295     for (c = cStart; c < cEnd; ++c) {
296       PetscInt corners;
297 
298       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
299       if (!cornersSeen[corners]) {
300         PetscInt nFV;
301 
302         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
303         cornersSeen[corners] = 1;
304 
305         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
306 
307         numFaceVertices[numFaceCases++] = nFV;
308       }
309     }
310   }
311   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
312   /* Count neighboring cells */
313   for (cell = cStart; cell < cEnd; ++cell) {
314     PetscInt numNeighbors = PETSC_DETERMINE, n;
315 
316     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
317     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
318     for (n = 0; n < numNeighbors; ++n) {
319       PetscInt        cellPair[2];
320       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
321       PetscInt        meetSize = 0;
322       const PetscInt *meet    = NULL;
323 
324       cellPair[0] = cell; cellPair[1] = neighborCells[n];
325       if (cellPair[0] == cellPair[1]) continue;
326       if (!found) {
327         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
328         if (meetSize) {
329           PetscInt f;
330 
331           for (f = 0; f < numFaceCases; ++f) {
332             if (numFaceVertices[f] == meetSize) {
333               found = PETSC_TRUE;
334               break;
335             }
336           }
337         }
338         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
339       }
340       if (found) ++off[cell-cStart+1];
341     }
342   }
343   /* Prefix sum */
344   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
345 
346   if (adjacency) {
347     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
348     /* Get neighboring cells */
349     for (cell = cStart; cell < cEnd; ++cell) {
350       PetscInt numNeighbors = PETSC_DETERMINE, n;
351       PetscInt cellOffset   = 0;
352 
353       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
354       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
355       for (n = 0; n < numNeighbors; ++n) {
356         PetscInt        cellPair[2];
357         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
358         PetscInt        meetSize = 0;
359         const PetscInt *meet    = NULL;
360 
361         cellPair[0] = cell; cellPair[1] = neighborCells[n];
362         if (cellPair[0] == cellPair[1]) continue;
363         if (!found) {
364           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
365           if (meetSize) {
366             PetscInt f;
367 
368             for (f = 0; f < numFaceCases; ++f) {
369               if (numFaceVertices[f] == meetSize) {
370                 found = PETSC_TRUE;
371                 break;
372               }
373             }
374           }
375           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
376         }
377         if (found) {
378           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
379           ++cellOffset;
380         }
381       }
382     }
383   }
384   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
385   if (numVertices) *numVertices = numCells;
386   if (offsets)   *offsets   = off;
387   if (adjacency) *adjacency = adj;
388   PetscFunctionReturn(0);
389 }
390 
/*@C
  PetscPartitionerRegister - Adds a new PetscPartitioner implementation

  Not Collective

  Input Parameters:
+ name        - The name of a new user-defined creation routine
- create_func - The creation routine itself

  Notes:
  PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners

  Sample usage:
.vb
    PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
.ve

  Then, your PetscPartitioner type can be chosen with the procedural interface via
.vb
    PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
    PetscPartitionerSetType(PetscPartitioner, "my_part");
.ve
   or at runtime via the option
.vb
    -petscpartitioner_type my_part
.ve

  Level: advanced

.keywords: PetscPartitioner, register
.seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()

@*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Record the constructor under the given name in the global registry consulted by PetscPartitionerSetType() */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
432 
/*@C
  PetscPartitionerSetType - Builds a particular PetscPartitioner

  Collective on PetscPartitioner

  Input Parameters:
+ part - The PetscPartitioner object
- name - The kind of partitioner

  Options Database Key:
. -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types

  Level: intermediate

.keywords: PetscPartitioner, set, type
.seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
{
  PetscErrorCode (*r)(PetscPartitioner);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Nothing to do if the object already has the requested type */
  ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  /* Make sure the built-in types are registered, then look up the constructor */
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);

  /* Tear down the previous implementation and clear its ops table before installing the new one */
  if (part->ops->destroy) {
    ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
  }
  ierr = PetscMemzero(part->ops, sizeof(struct _PetscPartitionerOps));CHKERRQ(ierr);
  ierr = (*r)(part);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
473 
/*@C
  PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.

  Not Collective

  Input Parameter:
. part - The PetscPartitioner

  Output Parameter:
. name - The PetscPartitioner type name

  Level: intermediate

.keywords: PetscPartitioner, get, type, name
.seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  /* Returns a borrowed pointer to the object's type name; the caller must not free it */
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
501 
/*@C
  PetscPartitionerView - Views a PetscPartitioner

  Collective on PetscPartitioner

  Input Parameters:
+ part - the PetscPartitioner object to view
- v    - the viewer

  Level: developer

.seealso: PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
{
  PetscMPIInt    size;
  PetscBool      isascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Default to the standard ASCII viewer on the object's communicator */
  if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
  ierr = PetscObjectTypeCompare((PetscObject) v, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
  if (isascii) {
    ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v, "Graph Partitioner: %d MPI Process%s\n", size, size > 1 ? "es" : "");CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v, "  type: %s\n", part->hdr.type_name);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPushTab(v);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v, "edge cut: %D\n", part->edgeCut);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v, "balance:  %.2g\n", part->balance);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPopTab(v);CHKERRQ(ierr);
  }
  /* Let the implementation print its own state */
  if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
537 
538 static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType)
539 {
540   PetscFunctionBegin;
541   if (!currentType) {
542 #if defined(PETSC_HAVE_CHACO)
543     *defaultType = PETSCPARTITIONERCHACO;
544 #elif defined(PETSC_HAVE_PARMETIS)
545     *defaultType = PETSCPARTITIONERPARMETIS;
546 #elif defined(PETSC_HAVE_PTSCOTCH)
547     *defaultType = PETSCPARTITIONERPTSCOTCH;
548 #else
549     *defaultType = PETSCPARTITIONERSIMPLE;
550 #endif
551   } else {
552     *defaultType = currentType;
553   }
554   PetscFunctionReturn(0);
555 }
556 
/*@
  PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to set options for

  Level: developer

.seealso: PetscPartitionerView()
@*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  const char    *defaultType;
  char           name[256];
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  ierr = PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);CHKERRQ(ierr);
  /* PetscObjectOptionsBegin declares the PetscOptionsObject used below (macro-provided — see PETSc options macros) */
  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, sizeof(name), &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
  } else if (!((PetscObject) part)->type_name) {
    /* No type requested and none set yet: install the default */
    ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
  }
  /* Let the implementation consume its own options */
  if (part->ops->setfromoptions) {
    ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);
  }
  /* Record viewer settings for -petscpartitioner_view_graph so the graph can be dumped during partitioning */
  ierr = PetscOptionsGetViewer(((PetscObject) part)->comm, ((PetscObject) part)->options, ((PetscObject) part)->prefix, "-petscpartitioner_view_graph", &part->viewerGraph, &part->formatGraph, &part->viewGraph);CHKERRQ(ierr);
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
596 
/*@C
  PetscPartitionerSetUp - Construct data structures for the PetscPartitioner

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to setup

  Level: developer

.seealso: PetscPartitionerView(), PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Delegate entirely to the implementation; a type without a setup hook is a no-op */
  if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
618 
619 /*@
620   PetscPartitionerDestroy - Destroys a PetscPartitioner object
621 
622   Collective on PetscPartitioner
623 
624   Input Parameter:
625 . part - the PetscPartitioner object to destroy
626 
627   Level: developer
628 
629 .seealso: PetscPartitionerView()
630 @*/
631 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
632 {
633   PetscErrorCode ierr;
634 
635   PetscFunctionBegin;
636   if (!*part) PetscFunctionReturn(0);
637   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
638 
639   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
640   ((PetscObject) (*part))->refct = 0;
641 
642   ierr = PetscViewerDestroy(&(*part)->viewerGraph);CHKERRQ(ierr);
643   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
644   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
645   PetscFunctionReturn(0);
646 }
647 
/*@
  PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the PetscPartitioner object

  Output Parameter:
. part - The PetscPartitioner object

  Level: beginner

.seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
@*/
PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
{
  PetscPartitioner p;
  const char       *partitionerType = NULL;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidPointer(part, 2);
  /* Null the output first so the caller sees NULL if creation fails partway */
  *part = NULL;
  ierr = DMInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
  /* Install the build-dependent default type immediately so the object is usable without an explicit SetType */
  ierr = PetscPartitionerGetDefaultType(NULL,&partitionerType);CHKERRQ(ierr);
  ierr = PetscPartitionerSetType(p,partitionerType);CHKERRQ(ierr);

  p->edgeCut = 0;
  p->balance = 0.0;

  *part = p;
  PetscFunctionReturn(0);
}
684 
685 /*@
686   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
687 
688   Collective on DM
689 
690   Input Parameters:
691 + part    - The PetscPartitioner
692 - dm      - The mesh DM
693 
694   Output Parameters:
695 + partSection     - The PetscSection giving the division of points by partition
696 - partition       - The list of points by partition
697 
698   Options Database:
699 . -petscpartitioner_view_graph - View the graph we are partitioning
700 
701   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
702 
703   Level: developer
704 
705 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
706 @*/
707 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
708 {
709   PetscMPIInt    size;
710   PetscErrorCode ierr;
711 
712   PetscFunctionBegin;
713   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
714   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
715   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
716   PetscValidPointer(partition, 5);
717   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
718   if (size == 1) {
719     PetscInt *points;
720     PetscInt  cStart, cEnd, c;
721 
722     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
723     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
724     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
725     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
726     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
727     for (c = cStart; c < cEnd; ++c) points[c] = c;
728     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
729     PetscFunctionReturn(0);
730   }
731   if (part->height == 0) {
732     PetscInt numVertices;
733     PetscInt *start     = NULL;
734     PetscInt *adjacency = NULL;
735     IS       globalNumbering;
736 
737     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
738     if (part->viewGraph) {
739       PetscViewer viewer = part->viewerGraph;
740       PetscBool   isascii;
741       PetscInt    v, i;
742       PetscMPIInt rank;
743 
744       ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) viewer), &rank);CHKERRQ(ierr);
745       ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
746       if (isascii) {
747         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
748         ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]Nv: %D\n", rank, numVertices);CHKERRQ(ierr);
749         for (v = 0; v < numVertices; ++v) {
750           const PetscInt s = start[v];
751           const PetscInt e = start[v+1];
752 
753           ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]  ", rank);CHKERRQ(ierr);
754           for (i = s; i < e; ++i) {ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%D ", adjacency[i]);CHKERRQ(ierr);}
755           ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%D-%D)\n", s, e);CHKERRQ(ierr);
756         }
757         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
758         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
759       }
760     }
761     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
762     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
763     ierr = PetscFree(start);CHKERRQ(ierr);
764     ierr = PetscFree(adjacency);CHKERRQ(ierr);
765     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
766       const PetscInt *globalNum;
767       const PetscInt *partIdx;
768       PetscInt *map, cStart, cEnd;
769       PetscInt *adjusted, i, localSize, offset;
770       IS    newPartition;
771 
772       ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
773       ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
774       ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
775       ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
776       ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
777       ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
778       for (i = cStart, offset = 0; i < cEnd; i++) {
779         if (globalNum[i - cStart] >= 0) map[offset++] = i;
780       }
781       for (i = 0; i < localSize; i++) {
782         adjusted[i] = map[partIdx[i]];
783       }
784       ierr = PetscFree(map);CHKERRQ(ierr);
785       ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
786       ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
787       ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
788       ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
789       ierr = ISDestroy(partition);CHKERRQ(ierr);
790       *partition = newPartition;
791     }
792   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
793   ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
794   PetscFunctionReturn(0);
795 }
796 
797 static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
798 {
799   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
800   PetscErrorCode          ierr;
801 
802   PetscFunctionBegin;
803   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
804   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
805   ierr = PetscFree(p);CHKERRQ(ierr);
806   PetscFunctionReturn(0);
807 }
808 
809 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
810 {
811   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
812   PetscErrorCode          ierr;
813 
814   PetscFunctionBegin;
815   if (p->random) {
816     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
817     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
818     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
819   }
820   PetscFunctionReturn(0);
821 }
822 
823 static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
824 {
825   PetscBool      iascii;
826   PetscErrorCode ierr;
827 
828   PetscFunctionBegin;
829   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
830   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
831   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
832   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
833   PetscFunctionReturn(0);
834 }
835 
836 static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
837 {
838   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
839   PetscErrorCode          ierr;
840 
841   PetscFunctionBegin;
842   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner Shell Options");CHKERRQ(ierr);
843   ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
844   ierr = PetscOptionsTail();CHKERRQ(ierr);
845   PetscFunctionReturn(0);
846 }
847 
848 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
849 {
850   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
851   PetscInt                np;
852   PetscErrorCode          ierr;
853 
854   PetscFunctionBegin;
855   if (p->random) {
856     PetscRandom r;
857     PetscInt   *sizes, *points, v, p;
858     PetscMPIInt rank;
859 
860     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
861     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
862     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr);
863     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
864     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
865     for (v = 0; v < numVertices; ++v) {points[v] = v;}
866     for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);}
867     for (v = numVertices-1; v > 0; --v) {
868       PetscReal val;
869       PetscInt  w, tmp;
870 
871       ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));CHKERRQ(ierr);
872       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
873       w    = PetscFloorReal(val);
874       tmp       = points[v];
875       points[v] = points[w];
876       points[w] = tmp;
877     }
878     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
879     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
880     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
881   }
882   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
883   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
884   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
885   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
886   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
887   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
888   *partition = p->partition;
889   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
890   PetscFunctionReturn(0);
891 }
892 
893 static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
894 {
895   PetscFunctionBegin;
896   part->ops->view           = PetscPartitionerView_Shell;
897   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
898   part->ops->destroy        = PetscPartitionerDestroy_Shell;
899   part->ops->partition      = PetscPartitionerPartition_Shell;
900   PetscFunctionReturn(0);
901 }
902 
903 /*MC
904   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
905 
906   Level: intermediate
907 
908 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
909 M*/
910 
911 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
912 {
913   PetscPartitioner_Shell *p;
914   PetscErrorCode          ierr;
915 
916   PetscFunctionBegin;
917   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
918   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
919   part->data = p;
920 
921   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
922   p->random = PETSC_FALSE;
923   PetscFunctionReturn(0);
924 }
925 
926 /*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
928 
929   Collective on Part
930 
931   Input Parameters:
932 + part     - The PetscPartitioner
. size     - The number of partitions
934 . sizes    - array of size size (or NULL) providing the number of points in each partition
935 - points   - array of size sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)
936 
937   Level: developer
938 
939   Notes:
940 
941     It is safe to free the sizes and points arrays after use in this routine.
942 
943 .seealso DMPlexDistribute(), PetscPartitionerCreate()
944 @*/
945 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
946 {
947   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
948   PetscInt                proc, numPoints;
949   PetscErrorCode          ierr;
950 
951   PetscFunctionBegin;
952   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
953   if (sizes)  {PetscValidPointer(sizes, 3);}
954   if (points) {PetscValidPointer(points, 4);}
955   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
956   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
957   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
958   ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
959   if (sizes) {
960     for (proc = 0; proc < size; ++proc) {
961       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
962     }
963   }
964   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
965   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
966   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
967   PetscFunctionReturn(0);
968 }
969 
970 /*@
971   PetscPartitionerShellSetRandom - Set the flag to use a random partition
972 
973   Collective on Part
974 
975   Input Parameters:
976 + part   - The PetscPartitioner
977 - random - The flag to use a random partition
978 
979   Level: intermediate
980 
981 .seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
982 @*/
983 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
984 {
985   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
986 
987   PetscFunctionBegin;
988   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
989   p->random = random;
990   PetscFunctionReturn(0);
991 }
992 
993 /*@
994   PetscPartitionerShellGetRandom - get the flag to use a random partition
995 
996   Collective on Part
997 
998   Input Parameter:
999 . part   - The PetscPartitioner
1000 
  Output Parameter:
1002 . random - The flag to use a random partition
1003 
1004   Level: intermediate
1005 
1006 .seealso PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
1007 @*/
1008 PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
1009 {
1010   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1011 
1012   PetscFunctionBegin;
1013   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1014   PetscValidPointer(random, 2);
1015   *random = p->random;
1016   PetscFunctionReturn(0);
1017 }
1018 
1019 static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
1020 {
1021   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
1022   PetscErrorCode          ierr;
1023 
1024   PetscFunctionBegin;
1025   ierr = PetscFree(p);CHKERRQ(ierr);
1026   PetscFunctionReturn(0);
1027 }
1028 
/* ASCII view for the Simple partitioner: it has no configurable state, so nothing is printed */
static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1034 
1035 static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
1036 {
1037   PetscBool      iascii;
1038   PetscErrorCode ierr;
1039 
1040   PetscFunctionBegin;
1041   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1042   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1043   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1044   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
1045   PetscFunctionReturn(0);
1046 }
1047 
/*
  Partition operation for the Simple partitioner: vertices are assigned to parts
  in global order, giving each part floor(N/nparts) vertices plus one extra for
  the first (N % nparts) parts. The output IS is always the identity on the
  local vertices; only the section (dofs per part) encodes the assignment.
*/
static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)dm);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  /* Local vertices keep their order; the section alone determines which part each falls in */
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial: part np gets numVertices/nparts vertices, +1 for the first (numVertices % nparts) parts */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  }
  else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Build the global vertex offsets: offsets[r] = first global index owned by rank r */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    /* Prefix sum; offsets[1] needs no update since offsets[0] == 0 */
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    /* Inclusive global index range [myFirst, myLast] owned by this rank */
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      PetscInt rem = nvGlobal % nparts;
      /* The first 'rem' parts have pBig vertices, the remaining have pSmall */
      PetscInt pSmall = nvGlobal/nparts;
      PetscInt pBig = nvGlobal/nparts + 1;


      if (rem) {
        /* Locate the parts containing myFirst and myLast, accounting for the
           pBig-sized parts at the front (if rem > 0, pBig >= 1 so no div-by-zero;
           and if pSmall == 0 the else branches below are never taken) */
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        }
        else {
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        }
        else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      }
      else {
        /* Even division: every part has exactly nvGlobal/nparts vertices */
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      /* For each part overlapping this rank's range, record the size of the overlap */
      for (np = firstPart; np <= lastPart; np++) {
        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1121 
1122 static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1123 {
1124   PetscFunctionBegin;
1125   part->ops->view      = PetscPartitionerView_Simple;
1126   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1127   part->ops->partition = PetscPartitionerPartition_Simple;
1128   PetscFunctionReturn(0);
1129 }
1130 
1131 /*MC
1132   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1133 
1134   Level: intermediate
1135 
1136 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1137 M*/
1138 
1139 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1140 {
1141   PetscPartitioner_Simple *p;
1142   PetscErrorCode           ierr;
1143 
1144   PetscFunctionBegin;
1145   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1146   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1147   part->data = p;
1148 
1149   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1150   PetscFunctionReturn(0);
1151 }
1152 
1153 static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1154 {
1155   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1156   PetscErrorCode          ierr;
1157 
1158   PetscFunctionBegin;
1159   ierr = PetscFree(p);CHKERRQ(ierr);
1160   PetscFunctionReturn(0);
1161 }
1162 
/* ASCII view for the Gather partitioner: it has no configurable state, so nothing is printed */
static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1168 
1169 static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1170 {
1171   PetscBool      iascii;
1172   PetscErrorCode ierr;
1173 
1174   PetscFunctionBegin;
1175   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1176   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1177   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1178   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1179   PetscFunctionReturn(0);
1180 }
1181 
1182 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1183 {
1184   PetscInt       np;
1185   PetscErrorCode ierr;
1186 
1187   PetscFunctionBegin;
1188   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1189   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1190   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1191   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1192   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1193   PetscFunctionReturn(0);
1194 }
1195 
1196 static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1197 {
1198   PetscFunctionBegin;
1199   part->ops->view      = PetscPartitionerView_Gather;
1200   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1201   part->ops->partition = PetscPartitionerPartition_Gather;
1202   PetscFunctionReturn(0);
1203 }
1204 
1205 /*MC
1206   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1207 
1208   Level: intermediate
1209 
1210 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1211 M*/
1212 
1213 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1214 {
1215   PetscPartitioner_Gather *p;
1216   PetscErrorCode           ierr;
1217 
1218   PetscFunctionBegin;
1219   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1220   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1221   part->data = p;
1222 
1223   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1224   PetscFunctionReturn(0);
1225 }
1226 
1227 
1228 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1229 {
1230   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1231   PetscErrorCode          ierr;
1232 
1233   PetscFunctionBegin;
1234   ierr = PetscFree(p);CHKERRQ(ierr);
1235   PetscFunctionReturn(0);
1236 }
1237 
/* ASCII view for the Chaco partitioner: it has no configurable state, so nothing is printed */
static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1243 
1244 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1245 {
1246   PetscBool      iascii;
1247   PetscErrorCode ierr;
1248 
1249   PetscFunctionBegin;
1250   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1251   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1252   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1253   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1254   PetscFunctionReturn(0);
1255 }
1256 
1257 #if defined(PETSC_HAVE_CHACO)
1258 #if defined(PETSC_HAVE_UNISTD_H)
1259 #include <unistd.h>
1260 #endif
1261 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1262 #include <chaco.h>
1263 #else
1264 /* Older versions of Chaco do not have an include file */
1265 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1266                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1267                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1268                        int mesh_dims[3], double *goal, int global_method, int local_method,
1269                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1270 #endif
1271 extern int FREE_GRAPH;
1272 #endif
1273 
/*
  Partition operation using the (serial) Chaco library. The graph is handed to
  Chaco's interface() routine; its stdout is temporarily redirected into a pipe
  so library messages can be attached to a PETSc error if the call fails.
  Chaco expects 1-based vertex numbering, so adjacency[] is shifted up before
  the call and shifted back afterwards (the caller's arrays are restored).
*/
static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int           *assignment;              /* Output partition */
#else
  short int     *assignment;              /* Output partition */
#endif
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
#if defined (PETSC_USE_DEBUG)
  {
    int ival,isum;
    PetscBool distributed;

    /* Chaco is a serial partitioner: error out if more than one rank holds vertices */
    ival = (numVertices > 0);
    ierr = MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);CHKERRQ(ierr);
    distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
    if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
  }
#endif
  if (!numVertices) {
    /* Nothing to partition on this rank: produce an empty section and IS */
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco uses 1-based vertex numbering: shift the adjacency in place (undone below) */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  /* 1-D processor mesh of nparts processors */
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    /* Save the real stdout and point fd 1 at the pipe's write end */
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    /* Drain Chaco's captured output and restore the real stdout.
       NOTE(review): output beyond the pipe's buffer capacity could block inside
       interface(); 10000 bytes are read here in any case — confirm acceptable. */
    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#else
  if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* Group vertex indices by partition number, preserving order inside each part */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift so the caller's adjacency array is unchanged */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1394 
1395 static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1396 {
1397   PetscFunctionBegin;
1398   part->ops->view      = PetscPartitionerView_Chaco;
1399   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1400   part->ops->partition = PetscPartitionerPartition_Chaco;
1401   PetscFunctionReturn(0);
1402 }
1403 
1404 /*MC
1405   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1406 
1407   Level: intermediate
1408 
1409 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1410 M*/
1411 
1412 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1413 {
1414   PetscPartitioner_Chaco *p;
1415   PetscErrorCode          ierr;
1416 
1417   PetscFunctionBegin;
1418   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1419   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1420   part->data = p;
1421 
1422   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1423   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1424   PetscFunctionReturn(0);
1425 }
1426 
/* Option names for the ParMetis partitioning strategies (indexed by PetscPartitioner_ParMetis.ptype) */
static const char *ptypes[] = {"kway", "rb"};
1428 
1429 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1430 {
1431   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1432   PetscErrorCode             ierr;
1433 
1434   PetscFunctionBegin;
1435   ierr = PetscFree(p);CHKERRQ(ierr);
1436   PetscFunctionReturn(0);
1437 }
1438 
/* ASCII view for the ParMetis partitioner: print the strategy, imbalance ratio, and debug flag */
static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
  PetscErrorCode             ierr;

  PetscFunctionBegin;
  ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
  /* ptype indexes the file-scope ptypes[] table ("kway" or "rb") */
  ierr = PetscViewerASCIIPrintf(viewer, "ParMetis type: %s\n", ptypes[p->ptype]);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "load imbalance ratio %g\n", (double) p->imbalanceRatio);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "debug flag %D\n", p->debugFlag);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1452 
1453 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1454 {
1455   PetscBool      iascii;
1456   PetscErrorCode ierr;
1457 
1458   PetscFunctionBegin;
1459   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1460   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1461   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1462   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1463   PetscFunctionReturn(0);
1464 }
1465 
/* Process command-line options for the ParMetis partitioner: strategy, imbalance ratio, debug flag */
static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
  PetscErrorCode            ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");CHKERRQ(ierr);
  /* Result is the index into ptypes[]: 0 = "kway", 1 = "rb" */
  ierr = PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-petscpartitioner_parmetis_imbalance_ratio", "Load imbalance ratio limit", "", p->imbalanceRatio, &p->imbalanceRatio, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-petscpartitioner_parmetis_debug", "Debugging flag", "", p->debugFlag, &p->debugFlag, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1479 
1480 #if defined(PETSC_HAVE_PARMETIS)
1481 #include <parmetis.h>
1482 #endif
1483 
1484 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1485 {
1486 #if defined(PETSC_HAVE_PARMETIS)
1487   PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *) part->data;
1488   MPI_Comm       comm;
1489   PetscSection   section;
1490   PetscInt       nvtxs       = numVertices; /* The number of vertices in full graph */
1491   PetscInt      *vtxdist;                   /* Distribution of vertices across processes */
1492   PetscInt      *xadj        = start;       /* Start of edge list for each vertex */
1493   PetscInt      *adjncy      = adjacency;   /* Edge lists for all vertices */
1494   PetscInt      *vwgt        = NULL;        /* Vertex weights */
1495   PetscInt      *adjwgt      = NULL;        /* Edge weights */
1496   PetscInt       wgtflag     = 0;           /* Indicates which weights are present */
1497   PetscInt       numflag     = 0;           /* Indicates initial offset (0 or 1) */
1498   PetscInt       ncon        = 1;           /* The number of weights per vertex */
1499   PetscInt       metis_ptype = pm->ptype;   /* kway or recursive bisection */
1500   real_t        *tpwgts;                    /* The fraction of vertex weights assigned to each partition */
1501   real_t        *ubvec;                     /* The balance intolerance for vertex weights */
1502   PetscInt       options[64];               /* Options */
1503   /* Outputs */
1504   PetscInt       v, i, *assignment, *points;
1505   PetscMPIInt    size, rank, p;
1506   PetscErrorCode ierr;
1507 
1508   PetscFunctionBegin;
1509   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1510   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1511   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1512   /* Calculate vertex distribution */
1513   ierr = PetscMalloc5(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
1514   vtxdist[0] = 0;
1515   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1516   for (p = 2; p <= size; ++p) {
1517     vtxdist[p] += vtxdist[p-1];
1518   }
1519   /* Calculate partition weights */
1520   for (p = 0; p < nparts; ++p) {
1521     tpwgts[p] = 1.0/nparts;
1522   }
1523   ubvec[0] = pm->imbalanceRatio;
1524   /* Weight cells by dofs on cell by default */
1525   ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
1526   if (section) {
1527     PetscInt cStart, cEnd, dof;
1528 
1529     ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
1530     for (v = cStart; v < cEnd; ++v) {
1531       ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1532       /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1533       /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1534       if (v-cStart < numVertices) vwgt[v-cStart] = PetscMax(dof, 1);
1535     }
1536   } else {
1537     for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1538   }
1539   wgtflag |= 2; /* have weights on graph vertices */
1540 
1541   if (nparts == 1) {
1542     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1543   } else {
1544     for (p = 0; !vtxdist[p+1] && p < size; ++p);
1545     if (vtxdist[p+1] == vtxdist[size]) {
1546       if (rank == p) {
1547         ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
1548         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
1549         if (metis_ptype == 1) {
1550           PetscStackPush("METIS_PartGraphRecursive");
1551           ierr = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1552           PetscStackPop;
1553           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
1554         } else {
1555           /*
1556            It would be nice to activate the two options below, but they would need some actual testing.
1557            - Turning on these options may exercise path of the METIS code that have bugs and may break production runs.
1558            - If CONTIG is set to 1, METIS will exit with error if the graph is disconnected, despite the manual saying the option is ignored in such case.
1559           */
1560           /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
1561           /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
1562           PetscStackPush("METIS_PartGraphKway");
1563           ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1564           PetscStackPop;
1565           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1566         }
1567       }
1568     } else {
1569       options[0] = 1;
1570       options[1] = pm->debugFlag;
1571       PetscStackPush("ParMETIS_V3_PartKway");
1572       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &comm);
1573       PetscStackPop;
1574       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
1575     }
1576   }
1577   /* Convert to PetscSection+IS */
1578   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1579   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1580   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1581   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1582   for (p = 0, i = 0; p < nparts; ++p) {
1583     for (v = 0; v < nvtxs; ++v) {
1584       if (assignment[v] == p) points[i++] = v;
1585     }
1586   }
1587   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1588   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1589   ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
1590   PetscFunctionReturn(0);
1591 #else
1592   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1593 #endif
1594 }
1595 
1596 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1597 {
1598   PetscFunctionBegin;
1599   part->ops->view           = PetscPartitionerView_ParMetis;
1600   part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
1601   part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
1602   part->ops->partition      = PetscPartitionerPartition_ParMetis;
1603   PetscFunctionReturn(0);
1604 }
1605 
1606 /*MC
1607   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1608 
1609   Level: intermediate
1610 
1611 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1612 M*/
1613 
1614 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1615 {
1616   PetscPartitioner_ParMetis *p;
1617   PetscErrorCode          ierr;
1618 
1619   PetscFunctionBegin;
1620   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1621   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1622   part->data = p;
1623 
1624   p->ptype          = 0;
1625   p->imbalanceRatio = 1.05;
1626   p->debugFlag      = 0;
1627 
1628   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1629   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1630   PetscFunctionReturn(0);
1631 }
1632 
1633 
/* Citation flag/record for PT-Scotch; registered once in PetscPartitionerCreate_PTScotch() */
PetscBool PTScotchPartitionercite = PETSC_FALSE;
const char PTScotchPartitionerCitation[] =
  "@article{PTSCOTCH,\n"
  "  author  = {C. Chevalier and F. Pellegrini},\n"
  "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
  "  journal = {Parallel Computing},\n"
  "  volume  = {34},\n"
  "  number  = {6},\n"
  "  pages   = {318--331},\n"
  "  year    = {2008},\n"
  "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
  "}\n";

/* Implementation context for the PT-Scotch partitioner */
typedef struct {
  PetscInt  strategy;  /* index into PTScotchStrategyList; mapped to SCOTCH flags by PTScotch_Strategy() */
  PetscReal imbalance; /* load imbalance ratio handed to PT-Scotch */
} PetscPartitioner_PTScotch;

/* Names accepted by -petscpartitioner_ptscotch_strategy; order must match PTScotch_Strategy() */
static const char *const
PTScotchStrategyList[] = {
  "DEFAULT",
  "QUALITY",
  "SPEED",
  "BALANCE",
  "SAFETY",
  "SCALABILITY",
  "RECURSIVE",
  "REMAP"
};
1663 
1664 #if defined(PETSC_HAVE_PTSCOTCH)
1665 
1666 EXTERN_C_BEGIN
1667 #include <ptscotch.h>
1668 EXTERN_C_END
1669 
1670 #define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0)
1671 
1672 static int PTScotch_Strategy(PetscInt strategy)
1673 {
1674   switch (strategy) {
1675   case  0: return SCOTCH_STRATDEFAULT;
1676   case  1: return SCOTCH_STRATQUALITY;
1677   case  2: return SCOTCH_STRATSPEED;
1678   case  3: return SCOTCH_STRATBALANCE;
1679   case  4: return SCOTCH_STRATSAFETY;
1680   case  5: return SCOTCH_STRATSCALABILITY;
1681   case  6: return SCOTCH_STRATRECURSIVE;
1682   case  7: return SCOTCH_STRATREMAP;
1683   default: return SCOTCH_STRATDEFAULT;
1684   }
1685 }
1686 
1687 
1688 static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1689                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[])
1690 {
1691   SCOTCH_Graph   grafdat;
1692   SCOTCH_Strat   stradat;
1693   SCOTCH_Num     vertnbr = n;
1694   SCOTCH_Num     edgenbr = xadj[n];
1695   SCOTCH_Num*    velotab = vtxwgt;
1696   SCOTCH_Num*    edlotab = adjwgt;
1697   SCOTCH_Num     flagval = strategy;
1698   double         kbalval = imbalance;
1699   PetscErrorCode ierr;
1700 
1701   PetscFunctionBegin;
1702   {
1703     PetscBool flg = PETSC_TRUE;
1704     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
1705     if (!flg) velotab = NULL;
1706   }
1707   ierr = SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr);
1708   ierr = SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
1709   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1710   ierr = SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
1711 #if defined(PETSC_USE_DEBUG)
1712   ierr = SCOTCH_graphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1713 #endif
1714   ierr = SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
1715   SCOTCH_stratExit(&stradat);
1716   SCOTCH_graphExit(&grafdat);
1717   PetscFunctionReturn(0);
1718 }
1719 
1720 static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1721                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[], MPI_Comm comm)
1722 {
1723   PetscMPIInt     procglbnbr;
1724   PetscMPIInt     proclocnum;
1725   SCOTCH_Arch     archdat;
1726   SCOTCH_Dgraph   grafdat;
1727   SCOTCH_Dmapping mappdat;
1728   SCOTCH_Strat    stradat;
1729   SCOTCH_Num      vertlocnbr;
1730   SCOTCH_Num      edgelocnbr;
1731   SCOTCH_Num*     veloloctab = vtxwgt;
1732   SCOTCH_Num*     edloloctab = adjwgt;
1733   SCOTCH_Num      flagval = strategy;
1734   double          kbalval = imbalance;
1735   PetscErrorCode  ierr;
1736 
1737   PetscFunctionBegin;
1738   {
1739     PetscBool flg = PETSC_TRUE;
1740     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
1741     if (!flg) veloloctab = NULL;
1742   }
1743   ierr = MPI_Comm_size(comm, &procglbnbr);CHKERRQ(ierr);
1744   ierr = MPI_Comm_rank(comm, &proclocnum);CHKERRQ(ierr);
1745   vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
1746   edgelocnbr = xadj[vertlocnbr];
1747 
1748   ierr = SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr);
1749   ierr = SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr);
1750 #if defined(PETSC_USE_DEBUG)
1751   ierr = SCOTCH_dgraphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1752 #endif
1753   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1754   ierr = SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRQ(ierr);
1755   ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
1756   ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
1757   ierr = SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr);
1758   ierr = SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr);
1759   SCOTCH_dgraphMapExit(&grafdat, &mappdat);
1760   SCOTCH_archExit(&archdat);
1761   SCOTCH_stratExit(&stradat);
1762   SCOTCH_dgraphExit(&grafdat);
1763   PetscFunctionReturn(0);
1764 }
1765 
1766 #endif /* PETSC_HAVE_PTSCOTCH */
1767 
1768 static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
1769 {
1770   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1771   PetscErrorCode             ierr;
1772 
1773   PetscFunctionBegin;
1774   ierr = PetscFree(p);CHKERRQ(ierr);
1775   PetscFunctionReturn(0);
1776 }
1777 
1778 static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer)
1779 {
1780   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1781   PetscErrorCode            ierr;
1782 
1783   PetscFunctionBegin;
1784   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1785   ierr = PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);CHKERRQ(ierr);
1786   ierr = PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n",(double)p->imbalance);CHKERRQ(ierr);
1787   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1788   PetscFunctionReturn(0);
1789 }
1790 
1791 static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
1792 {
1793   PetscBool      iascii;
1794   PetscErrorCode ierr;
1795 
1796   PetscFunctionBegin;
1797   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1798   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1799   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1800   if (iascii) {ierr = PetscPartitionerView_PTScotch_Ascii(part, viewer);CHKERRQ(ierr);}
1801   PetscFunctionReturn(0);
1802 }
1803 
1804 static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1805 {
1806   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1807   const char *const         *slist = PTScotchStrategyList;
1808   PetscInt                  nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0]));
1809   PetscBool                 flag;
1810   PetscErrorCode            ierr;
1811 
1812   PetscFunctionBegin;
1813   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");CHKERRQ(ierr);
1814   ierr = PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);CHKERRQ(ierr);
1815   ierr = PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);CHKERRQ(ierr);
1816   ierr = PetscOptionsTail();CHKERRQ(ierr);
1817   PetscFunctionReturn(0);
1818 }
1819 
1820 static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1821 {
1822 #if defined(PETSC_HAVE_PTSCOTCH)
1823   MPI_Comm       comm       = PetscObjectComm((PetscObject)part);
1824   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1825   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1826   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1827   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1828   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1829   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1830   PetscInt       v, i, *assignment, *points;
1831   PetscMPIInt    size, rank, p;
1832   PetscErrorCode ierr;
1833 
1834   PetscFunctionBegin;
1835   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1836   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1837   ierr = PetscMalloc2(nparts+1,&vtxdist,PetscMax(nvtxs,1),&assignment);CHKERRQ(ierr);
1838 
1839   /* Calculate vertex distribution */
1840   vtxdist[0] = 0;
1841   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1842   for (p = 2; p <= size; ++p) {
1843     vtxdist[p] += vtxdist[p-1];
1844   }
1845 
1846   if (nparts == 1) {
1847     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1848   } else {
1849     PetscSection section;
1850     /* Weight cells by dofs on cell by default */
1851     ierr = PetscMalloc1(PetscMax(nvtxs,1),&vwgt);CHKERRQ(ierr);
1852     ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
1853     if (section) {
1854       PetscInt vStart, vEnd, dof;
1855       ierr = DMPlexGetHeightStratum(dm, 0, &vStart, &vEnd);CHKERRQ(ierr);
1856       for (v = vStart; v < vEnd; ++v) {
1857         ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1858         /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1859         /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1860         if (v-vStart < numVertices) vwgt[v-vStart] = PetscMax(dof, 1);
1861       }
1862     } else {
1863       for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1864     }
1865     {
1866       PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
1867       int                       strat = PTScotch_Strategy(pts->strategy);
1868       double                    imbal = (double)pts->imbalance;
1869 
1870       for (p = 0; !vtxdist[p+1] && p < size; ++p);
1871       if (vtxdist[p+1] == vtxdist[size]) {
1872         if (rank == p) {
1873           ierr = PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, assignment);CHKERRQ(ierr);
1874         }
1875       } else {
1876         ierr = PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, assignment, comm);CHKERRQ(ierr);
1877       }
1878     }
1879     ierr = PetscFree(vwgt);CHKERRQ(ierr);
1880   }
1881 
1882   /* Convert to PetscSection+IS */
1883   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1884   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1885   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1886   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1887   for (p = 0, i = 0; p < nparts; ++p) {
1888     for (v = 0; v < nvtxs; ++v) {
1889       if (assignment[v] == p) points[i++] = v;
1890     }
1891   }
1892   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1893   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1894 
1895   ierr = PetscFree2(vtxdist,assignment);CHKERRQ(ierr);
1896   PetscFunctionReturn(0);
1897 #else
1898   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
1899 #endif
1900 }
1901 
1902 static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
1903 {
1904   PetscFunctionBegin;
1905   part->ops->view           = PetscPartitionerView_PTScotch;
1906   part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
1907   part->ops->partition      = PetscPartitionerPartition_PTScotch;
1908   part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
1909   PetscFunctionReturn(0);
1910 }
1911 
1912 /*MC
1913   PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library
1914 
1915   Level: intermediate
1916 
1917 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1918 M*/
1919 
1920 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
1921 {
1922   PetscPartitioner_PTScotch *p;
1923   PetscErrorCode          ierr;
1924 
1925   PetscFunctionBegin;
1926   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1927   ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
1928   part->data = p;
1929 
1930   p->strategy  = 0;
1931   p->imbalance = 0.01;
1932 
1933   ierr = PetscPartitionerInitialize_PTScotch(part);CHKERRQ(ierr);
1934   ierr = PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);CHKERRQ(ierr);
1935   PetscFunctionReturn(0);
1936 }
1937 
1938 
1939 /*@
1940   DMPlexGetPartitioner - Get the mesh partitioner
1941 
1942   Not collective
1943 
1944   Input Parameter:
1945 . dm - The DM
1946 
1947   Output Parameter:
1948 . part - The PetscPartitioner
1949 
1950   Level: developer
1951 
1952   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1953 
1954 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1955 @*/
1956 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1957 {
1958   DM_Plex       *mesh = (DM_Plex *) dm->data;
1959 
1960   PetscFunctionBegin;
1961   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1962   PetscValidPointer(part, 2);
1963   *part = mesh->partitioner;
1964   PetscFunctionReturn(0);
1965 }
1966 
1967 /*@
1968   DMPlexSetPartitioner - Set the mesh partitioner
1969 
1970   logically collective on dm and part
1971 
1972   Input Parameters:
1973 + dm - The DM
1974 - part - The partitioner
1975 
1976   Level: developer
1977 
1978   Note: Any existing PetscPartitioner will be destroyed.
1979 
1980 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1981 @*/
1982 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1983 {
1984   DM_Plex       *mesh = (DM_Plex *) dm->data;
1985   PetscErrorCode ierr;
1986 
1987   PetscFunctionBegin;
1988   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1989   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1990   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1991   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1992   mesh->partitioner = part;
1993   PetscFunctionReturn(0);
1994 }
1995 
1996 static PetscErrorCode DMPlexAddClosure_Tree(DM dm, PetscHSetI ht, PetscInt point, PetscBool up, PetscBool down)
1997 {
1998   PetscErrorCode ierr;
1999 
2000   PetscFunctionBegin;
2001   if (up) {
2002     PetscInt parent;
2003 
2004     ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
2005     if (parent != point) {
2006       PetscInt closureSize, *closure = NULL, i;
2007 
2008       ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
2009       for (i = 0; i < closureSize; i++) {
2010         PetscInt cpoint = closure[2*i];
2011 
2012         ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
2013         ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
2014       }
2015       ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
2016     }
2017   }
2018   if (down) {
2019     PetscInt numChildren;
2020     const PetscInt *children;
2021 
2022     ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
2023     if (numChildren) {
2024       PetscInt i;
2025 
2026       for (i = 0; i < numChildren; i++) {
2027         PetscInt cpoint = children[i];
2028 
2029         ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
2030         ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
2031       }
2032     }
2033   }
2034   PetscFunctionReturn(0);
2035 }
2036 
2037 /*@
2038   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
2039 
2040   Input Parameters:
2041 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2043 
2044   Level: developer
2045 
2046 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2047 @*/
PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, c, closureSize;
  PetscInt       *closure = NULL;
  DM_Plex        *mesh    = (DM_Plex *)dm->data;
  /* Tree (non-conforming mesh) support is only needed when parent/child sections exist */
  PetscBool       hasTree = (mesh->parentSection || mesh->childSection) ? PETSC_TRUE : PETSC_FALSE;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* For each rank value in the label, replace its stratum with the closure of that stratum */
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);

  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];
    PetscHSetI     ht;
    PetscInt       nelems, *elems, off = 0;

    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    /* Collect the closure in a hash set to deduplicate points shared between closures */
    ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
    /* Pre-size the set; 16 closure points per input point is a heuristic guess, not a bound */
    ierr = PetscHSetIResize(ht, numPoints*16);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      /* closure stores (point, orientation) pairs, hence the stride of 2 */
      for (c = 0; c < closureSize*2; c += 2) {
        ierr = PetscHSetIAdd(ht, closure[c]);CHKERRQ(ierr);
        if (hasTree) {ierr = DMPlexAddClosure_Tree(dm, ht, closure[c], PETSC_TRUE, PETSC_TRUE);CHKERRQ(ierr);}
      }
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
    /* Flatten the set into a sorted array and store it back as the stratum for this rank */
    ierr = PetscHSetIGetSize(ht, &nelems);CHKERRQ(ierr);
    ierr = PetscMalloc1(nelems, &elems);CHKERRQ(ierr);
    ierr = PetscHSetIGetElems(ht, &off, elems);CHKERRQ(ierr);
    ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
    ierr = PetscSortInt(nelems, elems);CHKERRQ(ierr);
    /* PETSC_OWN_POINTER: the IS takes ownership of elems */
    ierr = ISCreateGeneral(PETSC_COMM_SELF, nelems, elems, PETSC_OWN_POINTER, &pointIS);CHKERRQ(ierr);
    ierr = DMLabelSetStratumIS(label, rank, pointIS);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }

  /* Return the closure work array to the DM's internal pool (point argument is ignored here) */
  if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure);CHKERRQ(ierr);}
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2097 
2098 /*@
2099   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
2100 
2101   Input Parameters:
2102 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2104 
2105   Level: developer
2106 
2107 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2108 @*/
2109 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
2110 {
2111   IS              rankIS,   pointIS;
2112   const PetscInt *ranks,   *points;
2113   PetscInt        numRanks, numPoints, r, p, a, adjSize;
2114   PetscInt       *adj = NULL;
2115   PetscErrorCode  ierr;
2116 
2117   PetscFunctionBegin;
2118   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2119   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2120   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2121   for (r = 0; r < numRanks; ++r) {
2122     const PetscInt rank = ranks[r];
2123 
2124     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2125     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2126     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2127     for (p = 0; p < numPoints; ++p) {
2128       adjSize = PETSC_DETERMINE;
2129       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
2130       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
2131     }
2132     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2133     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2134   }
2135   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2136   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2137   ierr = PetscFree(adj);CHKERRQ(ierr);
2138   PetscFunctionReturn(0);
2139 }
2140 
2141 /*@
2142   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
2143 
2144   Input Parameters:
2145 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2147 
2148   Level: developer
2149 
2150   Note: This is required when generating multi-level overlaps to capture
2151   overlap points from non-neighbouring partitions.
2152 
2153 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2154 @*/
2155 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
2156 {
2157   MPI_Comm        comm;
2158   PetscMPIInt     rank;
2159   PetscSF         sfPoint;
2160   DMLabel         lblRoots, lblLeaves;
2161   IS              rankIS, pointIS;
2162   const PetscInt *ranks;
2163   PetscInt        numRanks, r;
2164   PetscErrorCode  ierr;
2165 
2166   PetscFunctionBegin;
2167   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2168   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2169   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
2170   /* Pull point contributions from remote leaves into local roots */
2171   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
2172   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
2173   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2174   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2175   for (r = 0; r < numRanks; ++r) {
2176     const PetscInt remoteRank = ranks[r];
2177     if (remoteRank == rank) continue;
2178     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
2179     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2180     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2181   }
2182   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2183   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2184   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
2185   /* Push point contributions from roots into remote leaves */
2186   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
2187   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
2188   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2189   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2190   for (r = 0; r < numRanks; ++r) {
2191     const PetscInt remoteRank = ranks[r];
2192     if (remoteRank == rank) continue;
2193     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
2194     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2195     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2196   }
2197   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2198   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2199   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
2200   PetscFunctionReturn(0);
2201 }
2202 
2203 /*@
2204   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
2205 
2206   Input Parameters:
2207 + dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
2209 . processSF - A star forest mapping into the local index on each remote rank
2210 
2211   Output Parameter:
- leafLabel - DMLabel assigning ranks to remote roots
2213 
2214   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
2215   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
2216 
2217   Level: developer
2218 
2219 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2220 @*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           p, n, numNeighbors, ssize, l, nleaves;
  PetscSF            sfPoint;
  PetscSFNode       *rootPoints, *leafPoints;       /* (point, rank) pairs: send buffer / receive buffer */
  PetscSection       rootSection, leafSection;      /* per-target-rank counts/offsets for the buffers */
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners */
  /* rootSection is chartered over all ranks [0, size): dof(r) = number of local points
     that rootLabel sends to rank r, so offsets index the packed rootPoints buffer. */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &ssize);CHKERRQ(ierr);
  ierr = PetscMalloc1(ssize, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  /* Pack each labeled point as a PetscSFNode. A point that is a leaf in the point SF is
     translated to its true owner (remote[l]); otherwise it is owned here, so (point, rank). */
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* local is NULL for a contiguous leaf numbering; PetscFindInt requires the sorted leaf array */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap */
  /* Ship the packed (point, rank) pairs to the target processes via processSF;
     MPIU_2INT matches the two-PetscInt layout of PetscSFNode. */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Filter remote contributions (ovLeafPoints) into the overlapSF */
  /* Every received pair becomes an entry of leafLabel: point -> owning rank */
  ierr = PetscSectionGetStorageSize(leafSection, &ssize);CHKERRQ(ierr);
  for (p = 0; p < ssize; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);      /* allocated by DMPlexDistributeData; ownership is ours */
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2290 
2291 /*@
2292   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
2293 
2294   Input Parameters:
2295 + dm    - The DM
2296 - label - DMLabel assigning ranks to remote roots
2297 
2298   Output Parameter:
2299 . sf    - The star forest communication context encapsulating the defined mapping
2300 
2301   Note: The incoming label is a receiver mapping of remote points to their parent rank.
2302 
2303   Level: developer
2304 
2305 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
2306 @*/
2307 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
2308 {
2309   PetscMPIInt     rank, size;
2310   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
2311   PetscSFNode    *remotePoints;
2312   IS              remoteRootIS;
2313   const PetscInt *remoteRoots;
2314   PetscErrorCode ierr;
2315 
2316   PetscFunctionBegin;
2317   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
2318   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
2319 
2320   for (numRemote = 0, n = 0; n < size; ++n) {
2321     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
2322     numRemote += numPoints;
2323   }
2324   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
2325   /* Put owned points first */
2326   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
2327   if (numPoints > 0) {
2328     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
2329     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2330     for (p = 0; p < numPoints; p++) {
2331       remotePoints[idx].index = remoteRoots[p];
2332       remotePoints[idx].rank = rank;
2333       idx++;
2334     }
2335     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2336     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
2337   }
2338   /* Now add remote points */
2339   for (n = 0; n < size; ++n) {
2340     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
2341     if (numPoints <= 0 || n == rank) continue;
2342     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
2343     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2344     for (p = 0; p < numPoints; p++) {
2345       remotePoints[idx].index = remoteRoots[p];
2346       remotePoints[idx].rank = n;
2347       idx++;
2348     }
2349     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2350     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
2351   }
2352   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
2353   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
2354   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
2355   PetscFunctionReturn(0);
2356 }
2357