xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 7de78196c56adc1717a53efbf1d173bcdf642ecd)
1 #include <petsc-private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
3 PetscClassId PETSCPARTITIONER_CLASSID = 0;
4 
5 PetscFunctionList PetscPartitionerList              = NULL;
6 PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
7 
8 PetscBool ChacoPartitionercite = PETSC_FALSE;
9 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
10                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
11                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
12                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
13                                "  isbn      = {0-89791-816-9},\n"
14                                "  pages     = {28},\n"
15                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
16                                "  publisher = {ACM Press},\n"
17                                "  address   = {New York},\n"
18                                "  year      = {1995}\n}\n";
19 
20 PetscBool ParMetisPartitionercite = PETSC_FALSE;
21 const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
22                                "  author  = {George Karypis and Vipin Kumar},\n"
23                                "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
24                                "  journal = {Journal of Parallel and Distributed Computing},\n"
25                                "  volume  = {48},\n"
26                                "  pages   = {71--85},\n"
27                                "  year    = {1998}\n}\n";
28 
#undef __FUNCT__
#define __FUNCT__ "DMPlexCreatePartitionerGraph"
/*@C
  DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner

  Input Parameters:
+ dm      - The mesh DM dm
- height  - Height of the strata from which to construct the graph

  Output Parameters:
+ numVertices - Number of vertices in the graph
. offsets     - Point offsets in the graph
- adjacency   - Point connectivity in the graph

  The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
  DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: developer

.seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
@*/
PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
{
  PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
  PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
  IS             cellNumbering;
  const PetscInt *cellNum;
  PetscBool      useCone, useClosure;
  PetscSection   section;
  PetscSegBuffer adjBuffer;
  PetscSF        sfPoint;
  PetscMPIInt    rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* nroots > 0 is used below as the "mesh is distributed" test: only then does a
     cell numbering with negative entries for non-owned cells exist */
  ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
  /* Build adjacency graph via a section/segbuffer: the section counts dofs per
     vertex, the segbuffer accumulates the adjacency lists */
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
  /* Always use FVM adjacency (cone, no closure) to create the partitioner graph;
     the user's adjacency flags are saved here and restored after the loop */
  ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
  if (nroots > 0) {
    ierr = DMPlexGetCellNumbering(dm, &cellNumbering);CHKERRQ(ierr);
    ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  }
  for (*numVertices = 0, p = pStart; p < pEnd; p++) {
    /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    adjSize = PETSC_DETERMINE;
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      const PetscInt point = adj[a];
      /* Keep only neighbors in the same stratum, excluding the point itself */
      if (point != p && pStart <= point && point < pEnd) {
        PetscInt *PETSC_RESTRICT pBuf;
        ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
        ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
        *pBuf = point;
      }
    }
    (*numVertices)++;
  }
  ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
  /* Derive CSR graph from section/segbuffer */
  ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
  /* idx compacts owned cells only, so vOffsets has exactly *numVertices rows */
  for (idx = 0, p = pStart; p < pEnd; p++) {
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
  }
  vOffsets[*numVertices] = size;
  if (offsets) *offsets = vOffsets;
  ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
  if (nroots > 0) {
    ISLocalToGlobalMapping ltogCells;
    /* NOTE(review): this 'size' shadows the outer 'size' (storage size) declared above */
    PetscInt n, size, *cells_arr;
    /* In parallel, apply a global cell numbering to the graph */
    ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
    /* Convert to positive global cell numbers (non-owned entries are stored as -(global+1)) */
    for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
    ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
  }
  if (adjacency) *adjacency = graph;
  /* Clean up */
  ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
  ierr = PetscFree(adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
132 
133 #undef __FUNCT__
134 #define __FUNCT__ "DMPlexCreateNeighborCSR"
135 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
136 {
137   const PetscInt maxFaceCases = 30;
138   PetscInt       numFaceCases = 0;
139   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
140   PetscInt      *off, *adj;
141   PetscInt      *neighborCells = NULL;
142   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
143   PetscErrorCode ierr;
144 
145   PetscFunctionBegin;
146   /* For parallel partitioning, I think you have to communicate supports */
147   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
148   cellDim = dim - cellHeight;
149   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
150   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
151   if (cEnd - cStart == 0) {
152     if (numVertices) *numVertices = 0;
153     if (offsets)   *offsets   = NULL;
154     if (adjacency) *adjacency = NULL;
155     PetscFunctionReturn(0);
156   }
157   numCells  = cEnd - cStart;
158   faceDepth = depth - cellHeight;
159   if (dim == depth) {
160     PetscInt f, fStart, fEnd;
161 
162     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
163     /* Count neighboring cells */
164     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
165     for (f = fStart; f < fEnd; ++f) {
166       const PetscInt *support;
167       PetscInt        supportSize;
168       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
169       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
170       if (supportSize == 2) {
171         PetscInt numChildren;
172 
173         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
174         if (!numChildren) {
175           ++off[support[0]-cStart+1];
176           ++off[support[1]-cStart+1];
177         }
178       }
179     }
180     /* Prefix sum */
181     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
182     if (adjacency) {
183       PetscInt *tmp;
184 
185       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
186       ierr = PetscMalloc1((numCells+1), &tmp);CHKERRQ(ierr);
187       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
188       /* Get neighboring cells */
189       for (f = fStart; f < fEnd; ++f) {
190         const PetscInt *support;
191         PetscInt        supportSize;
192         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
193         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
194         if (supportSize == 2) {
195           PetscInt numChildren;
196 
197           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
198           if (!numChildren) {
199             adj[tmp[support[0]-cStart]++] = support[1];
200             adj[tmp[support[1]-cStart]++] = support[0];
201           }
202         }
203       }
204 #if defined(PETSC_USE_DEBUG)
205       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
206 #endif
207       ierr = PetscFree(tmp);CHKERRQ(ierr);
208     }
209     if (numVertices) *numVertices = numCells;
210     if (offsets)   *offsets   = off;
211     if (adjacency) *adjacency = adj;
212     PetscFunctionReturn(0);
213   }
214   /* Setup face recognition */
215   if (faceDepth == 1) {
216     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
217 
218     for (c = cStart; c < cEnd; ++c) {
219       PetscInt corners;
220 
221       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
222       if (!cornersSeen[corners]) {
223         PetscInt nFV;
224 
225         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
226         cornersSeen[corners] = 1;
227 
228         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
229 
230         numFaceVertices[numFaceCases++] = nFV;
231       }
232     }
233   }
234   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
235   /* Count neighboring cells */
236   for (cell = cStart; cell < cEnd; ++cell) {
237     PetscInt numNeighbors = PETSC_DETERMINE, n;
238 
239     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
240     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
241     for (n = 0; n < numNeighbors; ++n) {
242       PetscInt        cellPair[2];
243       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
244       PetscInt        meetSize = 0;
245       const PetscInt *meet    = NULL;
246 
247       cellPair[0] = cell; cellPair[1] = neighborCells[n];
248       if (cellPair[0] == cellPair[1]) continue;
249       if (!found) {
250         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
251         if (meetSize) {
252           PetscInt f;
253 
254           for (f = 0; f < numFaceCases; ++f) {
255             if (numFaceVertices[f] == meetSize) {
256               found = PETSC_TRUE;
257               break;
258             }
259           }
260         }
261         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
262       }
263       if (found) ++off[cell-cStart+1];
264     }
265   }
266   /* Prefix sum */
267   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
268 
269   if (adjacency) {
270     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
271     /* Get neighboring cells */
272     for (cell = cStart; cell < cEnd; ++cell) {
273       PetscInt numNeighbors = PETSC_DETERMINE, n;
274       PetscInt cellOffset   = 0;
275 
276       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
277       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
278       for (n = 0; n < numNeighbors; ++n) {
279         PetscInt        cellPair[2];
280         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
281         PetscInt        meetSize = 0;
282         const PetscInt *meet    = NULL;
283 
284         cellPair[0] = cell; cellPair[1] = neighborCells[n];
285         if (cellPair[0] == cellPair[1]) continue;
286         if (!found) {
287           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
288           if (meetSize) {
289             PetscInt f;
290 
291             for (f = 0; f < numFaceCases; ++f) {
292               if (numFaceVertices[f] == meetSize) {
293                 found = PETSC_TRUE;
294                 break;
295               }
296             }
297           }
298           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
299         }
300         if (found) {
301           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
302           ++cellOffset;
303         }
304       }
305     }
306   }
307   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
308   if (numVertices) *numVertices = numCells;
309   if (offsets)   *offsets   = off;
310   if (adjacency) *adjacency = adj;
311   PetscFunctionReturn(0);
312 }
313 
#undef __FUNCT__
#define __FUNCT__ "DMPlexEnlargePartition"
/* Expand each partition by one BFS level on the mesh adjacency graph.

   For every partition in origPartSection/origPartition, the existing points are
   placed into a hash set, then every point adjacent (cone-based adjacency) to an
   existing point is added; the union becomes that partition's new point set.

   NOTE(review): the 'start' and 'adjacency' parameters are never referenced in
   this body -- adjacency is recomputed via DMPlexGetAdjacency(); confirm whether
   they can be dropped from the interface.
   NOTE(review): the cone adjacency flag is saved and restored around the loop,
   but the closure flag is not (unlike DMPlexCreatePartitionerGraph) -- verify
   this asymmetry is intended. */
PetscErrorCode DMPlexEnlargePartition(DM dm, const PetscInt start[], const PetscInt adjacency[], PetscSection origPartSection, IS origPartition, PetscSection partSection, IS *partition)
{
  PetscHashI      h;
  const PetscInt *points;
  PetscInt      **tmpPoints, *newPoints, totPoints = 0;
  PetscInt        pStart, pEnd, part, q;
  PetscBool       useCone;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscHashICreate(h);
  ierr = PetscSectionGetChart(origPartSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, pStart, pEnd);CHKERRQ(ierr);
  ierr = ISGetIndices(origPartition, &points);CHKERRQ(ierr);
  /* tmpPoints[part] will hold the enlarged point list for each partition */
  ierr = PetscMalloc1((pEnd - pStart), &tmpPoints);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
  for (part = pStart; part < pEnd; ++part) {
    PetscInt *adj = NULL;
    PetscInt  numPoints, nP, numNewPoints, off, p, n = 0;

    PetscHashIClear(h);
    ierr = PetscSectionGetDof(origPartSection, part, &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(origPartSection, part, &off);CHKERRQ(ierr);
    /* Add all existing points to h */
    for (p = 0; p < numPoints; ++p) {
      const PetscInt point = points[off+p];
      PetscHashIAdd(h, point, 1);
    }
    PetscHashISize(h, nP);
    /* Duplicates in the input partition would silently shrink the set; fail loudly instead */
    if (nP != numPoints) SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONG, "Invalid partition has %d points, but only %d were unique", numPoints, nP);
    /* Add all points in next BFS level */
    for (p = 0; p < numPoints; ++p) {
      const PetscInt point   = points[off+p];
      PetscInt       adjSize = PETSC_DETERMINE, a;

      ierr = DMPlexGetAdjacency(dm, point, &adjSize, &adj);CHKERRQ(ierr);
      for (a = 0; a < adjSize; ++a) PetscHashIAdd(h, adj[a], 1);
    }
    PetscHashISize(h, numNewPoints);
    ierr = PetscSectionSetDof(partSection, part, numNewPoints);CHKERRQ(ierr);
    ierr = PetscMalloc1(numNewPoints, &tmpPoints[part]);CHKERRQ(ierr);
    ierr = PetscHashIGetKeys(h, &n, tmpPoints[part]);CHKERRQ(ierr);
    ierr = PetscFree(adj);CHKERRQ(ierr);
    totPoints += numNewPoints;
  }
  ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
  ierr = ISRestoreIndices(origPartition, &points);CHKERRQ(ierr);
  PetscHashIDestroy(h);
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* Concatenate the per-partition lists into a single IS in partition order */
  ierr = PetscMalloc1(totPoints, &newPoints);CHKERRQ(ierr);
  for (part = pStart, q = 0; part < pEnd; ++part) {
    PetscInt numPoints, p;

    ierr = PetscSectionGetDof(partSection, part, &numPoints);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p, ++q) newPoints[q] = tmpPoints[part][p];
    ierr = PetscFree(tmpPoints[part]);CHKERRQ(ierr);
  }
  ierr = PetscFree(tmpPoints);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), totPoints, newPoints, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
379 
380 #undef __FUNCT__
381 #define __FUNCT__ "PetscPartitionerRegister"
382 /*@C
383   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
384 
385   Not Collective
386 
387   Input Parameters:
388 + name        - The name of a new user-defined creation routine
389 - create_func - The creation routine itself
390 
391   Notes:
392   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
393 
394   Sample usage:
395 .vb
396     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
397 .ve
398 
399   Then, your PetscPartitioner type can be chosen with the procedural interface via
400 .vb
401     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
402     PetscPartitionerSetType(PetscPartitioner, "my_part");
403 .ve
404    or at runtime via the option
405 .vb
406     -petscpartitioner_type my_part
407 .ve
408 
409   Level: advanced
410 
411 .keywords: PetscPartitioner, register
412 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
413 
414 @*/
415 PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
416 {
417   PetscErrorCode ierr;
418 
419   PetscFunctionBegin;
420   ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
421   PetscFunctionReturn(0);
422 }
423 
424 #undef __FUNCT__
425 #define __FUNCT__ "PetscPartitionerSetType"
426 /*@C
427   PetscPartitionerSetType - Builds a particular PetscPartitioner
428 
429   Collective on PetscPartitioner
430 
431   Input Parameters:
432 + part - The PetscPartitioner object
433 - name - The kind of partitioner
434 
435   Options Database Key:
436 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
437 
438   Level: intermediate
439 
440 .keywords: PetscPartitioner, set, type
441 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
442 @*/
443 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
444 {
445   PetscErrorCode (*r)(PetscPartitioner);
446   PetscBool      match;
447   PetscErrorCode ierr;
448 
449   PetscFunctionBegin;
450   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
451   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
452   if (match) PetscFunctionReturn(0);
453 
454   if (!PetscPartitionerRegisterAllCalled) {ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);}
455   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
456   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
457 
458   if (part->ops->destroy) {
459     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
460     part->ops->destroy = NULL;
461   }
462   ierr = (*r)(part);CHKERRQ(ierr);
463   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
464   PetscFunctionReturn(0);
465 }
466 
467 #undef __FUNCT__
468 #define __FUNCT__ "PetscPartitionerGetType"
469 /*@C
470   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
471 
472   Not Collective
473 
474   Input Parameter:
475 . part - The PetscPartitioner
476 
477   Output Parameter:
478 . name - The PetscPartitioner type name
479 
480   Level: intermediate
481 
482 .keywords: PetscPartitioner, get, type, name
483 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
484 @*/
485 PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
486 {
487   PetscErrorCode ierr;
488 
489   PetscFunctionBegin;
490   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
491   PetscValidPointer(name, 2);
492   if (!PetscPartitionerRegisterAllCalled) {ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);}
493   *name = ((PetscObject) part)->type_name;
494   PetscFunctionReturn(0);
495 }
496 
497 #undef __FUNCT__
498 #define __FUNCT__ "PetscPartitionerView"
499 /*@C
500   PetscPartitionerView - Views a PetscPartitioner
501 
502   Collective on PetscPartitioner
503 
504   Input Parameter:
505 + part - the PetscPartitioner object to view
506 - v    - the viewer
507 
508   Level: developer
509 
510 .seealso: PetscPartitionerDestroy()
511 @*/
512 PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
513 {
514   PetscErrorCode ierr;
515 
516   PetscFunctionBegin;
517   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
518   if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
519   if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
520   PetscFunctionReturn(0);
521 }
522 
523 #undef __FUNCT__
524 #define __FUNCT__ "PetscPartitionerViewFromOptions"
525 /*
526   PetscPartitionerViewFromOptions - Processes command line options to determine if/how a PetscPartitioner is to be viewed.
527 
528   Collective on PetscPartitioner
529 
530   Input Parameters:
531 + part   - the PetscPartitioner
532 . prefix - prefix to use for viewing, or NULL
533 - optionname - option to activate viewing
534 
535   Level: intermediate
536 
537 .keywords: PetscPartitioner, view, options, database
538 .seealso: VecViewFromOptions(), MatViewFromOptions()
539 */
540 PetscErrorCode PetscPartitionerViewFromOptions(PetscPartitioner part, const char prefix[], const char optionname[])
541 {
542   PetscViewer       viewer;
543   PetscViewerFormat format;
544   PetscBool         flg;
545   PetscErrorCode    ierr;
546 
547   PetscFunctionBegin;
548   if (prefix) {ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject) part), prefix,                       optionname, &viewer, &format, &flg);CHKERRQ(ierr);}
549   else        {ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject) part), ((PetscObject) part)->prefix, optionname, &viewer, &format, &flg);CHKERRQ(ierr);}
550   if (flg) {
551     ierr = PetscViewerPushFormat(viewer, format);CHKERRQ(ierr);
552     ierr = PetscPartitionerView(part, viewer);CHKERRQ(ierr);
553     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
554     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
555   }
556   PetscFunctionReturn(0);
557 }
558 #undef __FUNCT__
559 #define __FUNCT__ "PetscPartitionerSetTypeFromOptions_Internal"
560 PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
561 {
562   const char    *defaultType;
563   char           name[256];
564   PetscBool      flg;
565   PetscErrorCode ierr;
566 
567   PetscFunctionBegin;
568   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
569   if (!((PetscObject) part)->type_name) defaultType = PETSCPARTITIONERCHACO;
570   else                                  defaultType = ((PetscObject) part)->type_name;
571   if (!PetscPartitionerRegisterAllCalled) {ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);}
572 
573   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
574   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);CHKERRQ(ierr);
575   if (flg) {
576     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
577   } else if (!((PetscObject) part)->type_name) {
578     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
579   }
580   ierr = PetscOptionsEnd();CHKERRQ(ierr);
581   PetscFunctionReturn(0);
582 }
583 
584 #undef __FUNCT__
585 #define __FUNCT__ "PetscPartitionerSetFromOptions"
586 /*@
587   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
588 
589   Collective on PetscPartitioner
590 
591   Input Parameter:
592 . part - the PetscPartitioner object to set options for
593 
594   Level: developer
595 
596 .seealso: PetscPartitionerView()
597 @*/
598 PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
599 {
600   PetscErrorCode ierr;
601 
602   PetscFunctionBegin;
603   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
604   ierr = PetscPartitionerSetTypeFromOptions_Internal(part);CHKERRQ(ierr);
605 
606   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
607   if (part->ops->setfromoptions) {ierr = (*part->ops->setfromoptions)(part);CHKERRQ(ierr);}
608   /* process any options handlers added with PetscObjectAddOptionsHandler() */
609   ierr = PetscObjectProcessOptionsHandlers((PetscObject) part);CHKERRQ(ierr);
610   ierr = PetscOptionsEnd();CHKERRQ(ierr);
611   ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
612   PetscFunctionReturn(0);
613 }
614 
615 #undef __FUNCT__
616 #define __FUNCT__ "PetscPartitionerSetUp"
617 /*@C
618   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
619 
620   Collective on PetscPartitioner
621 
622   Input Parameter:
623 . part - the PetscPartitioner object to setup
624 
625   Level: developer
626 
627 .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
628 @*/
629 PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
630 {
631   PetscErrorCode ierr;
632 
633   PetscFunctionBegin;
634   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
635   if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
636   PetscFunctionReturn(0);
637 }
638 
639 #undef __FUNCT__
640 #define __FUNCT__ "PetscPartitionerDestroy"
641 /*@
642   PetscPartitionerDestroy - Destroys a PetscPartitioner object
643 
644   Collective on PetscPartitioner
645 
646   Input Parameter:
647 . part - the PetscPartitioner object to destroy
648 
649   Level: developer
650 
651 .seealso: PetscPartitionerView()
652 @*/
653 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
654 {
655   PetscErrorCode ierr;
656 
657   PetscFunctionBegin;
658   if (!*part) PetscFunctionReturn(0);
659   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
660 
661   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
662   ((PetscObject) (*part))->refct = 0;
663 
664   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
665   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
666   PetscFunctionReturn(0);
667 }
668 
669 #undef __FUNCT__
670 #define __FUNCT__ "PetscPartitionerCreate"
671 /*@
672   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
673 
674   Collective on MPI_Comm
675 
676   Input Parameter:
677 . comm - The communicator for the PetscPartitioner object
678 
679   Output Parameter:
680 . part - The PetscPartitioner object
681 
682   Level: beginner
683 
684 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL
685 @*/
686 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
687 {
688   PetscPartitioner p;
689   PetscErrorCode   ierr;
690 
691   PetscFunctionBegin;
692   PetscValidPointer(part, 2);
693   *part = NULL;
694   ierr = PetscFVInitializePackage();CHKERRQ(ierr);
695 
696   ierr = PetscHeaderCreate(p, _p_PetscPartitioner, struct _PetscPartitionerOps, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
697   ierr = PetscMemzero(p->ops, sizeof(struct _PetscPartitionerOps));CHKERRQ(ierr);
698 
699   *part = p;
700   PetscFunctionReturn(0);
701 }
702 
703 #undef __FUNCT__
704 #define __FUNCT__ "PetscPartitionerPartition"
705 /*@
706   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
707 
708   Collective on DM
709 
710   Input Parameters:
711 + part    - The PetscPartitioner
712 - dm      - The mesh DM
713 
714   Output Parameters:
715 + partSection     - The PetscSection giving the division of points by partition
716 - partition       - The list of points by partition
717 
718   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
719 
720   Level: developer
721 
722 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
723 @*/
724 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
725 {
726   PetscMPIInt    size;
727   PetscErrorCode ierr;
728 
729   PetscFunctionBegin;
730   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
731   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
732   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
733   PetscValidPointer(partition, 5);
734   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
735   if (size == 1) {
736     PetscInt *points;
737     PetscInt  cStart, cEnd, c;
738 
739     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
740     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
741     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
742     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
743     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
744     for (c = cStart; c < cEnd; ++c) points[c] = c;
745     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
746     PetscFunctionReturn(0);
747   }
748   if (part->height == 0) {
749     PetscInt  numVertices;
750     PetscInt *start     = NULL;
751     PetscInt *adjacency = NULL;
752 
753     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency);CHKERRQ(ierr);
754     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
755     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
756     ierr = PetscFree(start);CHKERRQ(ierr);
757     ierr = PetscFree(adjacency);CHKERRQ(ierr);
758   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
759   PetscFunctionReturn(0);
760 }
761 
762 #undef __FUNCT__
763 #define __FUNCT__ "PetscPartitionerDestroy_Shell"
764 PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
765 {
766   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
767   PetscErrorCode          ierr;
768 
769   PetscFunctionBegin;
770   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
771   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
772   ierr = PetscFree(p);CHKERRQ(ierr);
773   PetscFunctionReturn(0);
774 }
775 
776 #undef __FUNCT__
777 #define __FUNCT__ "PetscPartitionerView_Shell_Ascii"
778 PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
779 {
780   PetscViewerFormat format;
781   PetscErrorCode    ierr;
782 
783   PetscFunctionBegin;
784   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
785   ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
786   PetscFunctionReturn(0);
787 }
788 
789 #undef __FUNCT__
790 #define __FUNCT__ "PetscPartitionerView_Shell"
791 PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
792 {
793   PetscBool      iascii;
794   PetscErrorCode ierr;
795 
796   PetscFunctionBegin;
797   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
798   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
799   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
800   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
801   PetscFunctionReturn(0);
802 }
803 
804 #undef __FUNCT__
805 #define __FUNCT__ "PetscPartitionerPartition_Shell"
806 PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
807 {
808   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
809   PetscInt                np;
810   PetscErrorCode          ierr;
811 
812   PetscFunctionBegin;
813   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
814   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
815   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
816   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
817   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
818   *partition = p->partition;
819   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
820   PetscFunctionReturn(0);
821 }
822 
823 #undef __FUNCT__
824 #define __FUNCT__ "PetscPartitionerInitialize_Shell"
825 PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
826 {
827   PetscFunctionBegin;
828   part->ops->view      = PetscPartitionerView_Shell;
829   part->ops->destroy   = PetscPartitionerDestroy_Shell;
830   part->ops->partition = PetscPartitionerPartition_Shell;
831   PetscFunctionReturn(0);
832 }
833 
834 /*MC
835   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
836 
837   Level: intermediate
838 
839 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
840 M*/
841 
842 #undef __FUNCT__
843 #define __FUNCT__ "PetscPartitionerCreate_Shell"
844 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
845 {
846   PetscPartitioner_Shell *p;
847   PetscErrorCode          ierr;
848 
849   PetscFunctionBegin;
850   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
851   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
852   part->data = p;
853 
854   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
855   PetscFunctionReturn(0);
856 }
857 
858 #undef __FUNCT__
859 #define __FUNCT__ "PetscPartitionerShellSetPartition"
860 /*@C
861   PetscPartitionerShellSetPartition - Set an artifical partition for a mesh
862 
863   Collective on PART
864 
865   Input Parameters:
866 + part     - The PetscPartitioner
867 . numProcs - The number of partitions
868 . sizes    - array of size numProcs (or NULL) providing the number of points in each partition
869 - points   - array of size sum(sizes) (may be NULL iff sizes is NULL) providing the partition each point belongs to
870 
871   Level: developer
872 
873   Notes:
874 
875     It is safe to free the sizes and points arrays after use in this routine.
876 
877 .seealso DMPlexDistribute(), PetscPartitionerCreate()
878 @*/
879 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt numProcs, const PetscInt sizes[], const PetscInt points[])
880 {
881   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
882   PetscInt                proc, numPoints;
883   PetscErrorCode          ierr;
884 
885   PetscFunctionBegin;
886   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
887   if (sizes) {PetscValidPointer(sizes, 3);}
888   if (sizes) {PetscValidPointer(points, 4);}
889   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
890   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
891   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
892   ierr = PetscSectionSetChart(p->section, 0, numProcs);CHKERRQ(ierr);
893   if (sizes) {
894     for (proc = 0; proc < numProcs; ++proc) {
895       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
896     }
897   }
898   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
899   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
900   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
901   PetscFunctionReturn(0);
902 }
903 
904 #undef __FUNCT__
905 #define __FUNCT__ "PetscPartitionerDestroy_Chaco"
906 PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
907 {
908   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
909   PetscErrorCode          ierr;
910 
911   PetscFunctionBegin;
912   ierr = PetscFree(p);CHKERRQ(ierr);
913   PetscFunctionReturn(0);
914 }
915 
916 #undef __FUNCT__
917 #define __FUNCT__ "PetscPartitionerView_Chaco_Ascii"
918 PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
919 {
920   PetscViewerFormat format;
921   PetscErrorCode    ierr;
922 
923   PetscFunctionBegin;
924   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
925   ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
926   PetscFunctionReturn(0);
927 }
928 
929 #undef __FUNCT__
930 #define __FUNCT__ "PetscPartitionerView_Chaco"
931 PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
932 {
933   PetscBool      iascii;
934   PetscErrorCode ierr;
935 
936   PetscFunctionBegin;
937   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
938   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
939   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
940   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
941   PetscFunctionReturn(0);
942 }
943 
944 #if defined(PETSC_HAVE_CHACO)
945 #if defined(PETSC_HAVE_UNISTD_H)
946 #include <unistd.h>
947 #endif
948 /* Chaco does not have an include file */
949 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
950                        float *ewgts, float *x, float *y, float *z, char *outassignname,
951                        char *outfilename, short *assignment, int architecture, int ndims_tot,
952                        int mesh_dims[3], double *goal, int global_method, int local_method,
953                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
954 
955 extern int FREE_GRAPH;
956 #endif
957 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerPartition_Chaco"
/* Partition the CSR graph (start = row offsets, adjacency = column indices)
   into nparts pieces using the serial Chaco library. The result is written
   into partSection (number of points per rank) and partition (point numbers
   grouped by rank). Chaco prints diagnostics to stdout, so stdout is
   temporarily redirected into a pipe and replayed as an error message if the
   call fails. */
PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
  short int     *assignment;              /* Output partition: Chaco reports ranks as shorts */
  int            fd_stdout, fd_pipe[2];   /* saved stdout fd and the capture pipe */
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  if (!numVertices) {
    /* No local vertices: produce an empty section chart and an empty IS */
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco expects 1-based vertex numbers: shift the adjacency in place here
     and shift it back before returning */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  /* ierr carries Chaco's return code here, not a PETSc error code; it is
     checked below once stdout has been restored */
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    /* Drain the captured stdout, restore the saved descriptor, and report
       Chaco's own diagnostics if the call failed */
    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  /* Group vertex numbers by assigned partition (O(nparts*nvtxs) scan) */
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift applied above; the caller's arrays are unchanged */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1063 
1064 #undef __FUNCT__
1065 #define __FUNCT__ "PetscPartitionerInitialize_Chaco"
1066 PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1067 {
1068   PetscFunctionBegin;
1069   part->ops->view      = PetscPartitionerView_Chaco;
1070   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1071   part->ops->partition = PetscPartitionerPartition_Chaco;
1072   PetscFunctionReturn(0);
1073 }
1074 
1075 /*MC
1076   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1077 
1078   Level: intermediate
1079 
1080 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1081 M*/
1082 
1083 #undef __FUNCT__
1084 #define __FUNCT__ "PetscPartitionerCreate_Chaco"
1085 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1086 {
1087   PetscPartitioner_Chaco *p;
1088   PetscErrorCode          ierr;
1089 
1090   PetscFunctionBegin;
1091   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1092   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1093   part->data = p;
1094 
1095   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1096   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1097   PetscFunctionReturn(0);
1098 }
1099 
1100 #undef __FUNCT__
1101 #define __FUNCT__ "PetscPartitionerDestroy_ParMetis"
1102 PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1103 {
1104   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1105   PetscErrorCode             ierr;
1106 
1107   PetscFunctionBegin;
1108   ierr = PetscFree(p);CHKERRQ(ierr);
1109   PetscFunctionReturn(0);
1110 }
1111 
1112 #undef __FUNCT__
1113 #define __FUNCT__ "PetscPartitionerView_ParMetis_Ascii"
1114 PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1115 {
1116   PetscViewerFormat format;
1117   PetscErrorCode    ierr;
1118 
1119   PetscFunctionBegin;
1120   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1121   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
1122   PetscFunctionReturn(0);
1123 }
1124 
1125 #undef __FUNCT__
1126 #define __FUNCT__ "PetscPartitionerView_ParMetis"
1127 PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1128 {
1129   PetscBool      iascii;
1130   PetscErrorCode ierr;
1131 
1132   PetscFunctionBegin;
1133   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1134   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1135   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1136   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1137   PetscFunctionReturn(0);
1138 }
1139 
1140 #if defined(PETSC_HAVE_PARMETIS)
1141 #include <parmetis.h>
1142 #endif
1143 
1144 #undef __FUNCT__
1145 #define __FUNCT__ "PetscPartitionerPartition_ParMetis"
1146 PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1147 {
1148 #if defined(PETSC_HAVE_PARMETIS)
1149   MPI_Comm       comm;
1150   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1151   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1152   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1153   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1154   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1155   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1156   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1157   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1158   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1159   PetscReal     *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1160   PetscReal     *ubvec;                    /* The balance intolerance for vertex weights */
1161   PetscInt       options[5];               /* Options */
1162   /* Outputs */
1163   PetscInt       edgeCut;                  /* The number of edges cut by the partition */
1164   PetscInt      *assignment, *points;
1165   PetscMPIInt    rank, p, v, i;
1166   PetscErrorCode ierr;
1167 
1168   PetscFunctionBegin;
1169   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1170   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1171   options[0] = 0; /* Use all defaults */
1172   /* Calculate vertex distribution */
1173   ierr = PetscMalloc4(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment);CHKERRQ(ierr);
1174   vtxdist[0] = 0;
1175   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1176   for (p = 2; p <= nparts; ++p) {
1177     vtxdist[p] += vtxdist[p-1];
1178   }
1179   /* Calculate weights */
1180   for (p = 0; p < nparts; ++p) {
1181     tpwgts[p] = 1.0/nparts;
1182   }
1183   ubvec[0] = 1.05;
1184 
1185   if (nparts == 1) {
1186     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));
1187   } else {
1188     if (vtxdist[1] == vtxdist[nparts]) {
1189       if (!rank) {
1190         PetscStackPush("METIS_PartGraphKway");
1191         ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
1192         PetscStackPop;
1193         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1194       }
1195     } else {
1196       PetscStackPush("ParMETIS_V3_PartKway");
1197       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1198       PetscStackPop;
1199       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
1200     }
1201   }
1202   /* Convert to PetscSection+IS */
1203   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1204   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1205   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1206   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1207   for (p = 0, i = 0; p < nparts; ++p) {
1208     for (v = 0; v < nvtxs; ++v) {
1209       if (assignment[v] == p) points[i++] = v;
1210     }
1211   }
1212   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1213   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1214   ierr = PetscFree4(vtxdist,tpwgts,ubvec,assignment);CHKERRQ(ierr);
1215 #else
1216   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1217 #endif
1218   PetscFunctionReturn(0);
1219 }
1220 
1221 #undef __FUNCT__
1222 #define __FUNCT__ "PetscPartitionerInitialize_ParMetis"
1223 PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1224 {
1225   PetscFunctionBegin;
1226   part->ops->view      = PetscPartitionerView_ParMetis;
1227   part->ops->destroy   = PetscPartitionerDestroy_ParMetis;
1228   part->ops->partition = PetscPartitionerPartition_ParMetis;
1229   PetscFunctionReturn(0);
1230 }
1231 
1232 /*MC
1233   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1234 
1235   Level: intermediate
1236 
1237 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1238 M*/
1239 
1240 #undef __FUNCT__
1241 #define __FUNCT__ "PetscPartitionerCreate_ParMetis"
1242 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1243 {
1244   PetscPartitioner_ParMetis *p;
1245   PetscErrorCode          ierr;
1246 
1247   PetscFunctionBegin;
1248   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1249   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1250   part->data = p;
1251 
1252   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1253   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1254   PetscFunctionReturn(0);
1255 }
1256 
1257 #undef __FUNCT__
1258 #define __FUNCT__ "DMPlexGetPartitioner"
1259 /*@
1260   DMPlexGetPartitioner - Get the mesh partitioner
1261 
1262   Not collective
1263 
1264   Input Parameter:
1265 . dm - The DM
1266 
1267   Output Parameter:
1268 . part - The PetscPartitioner
1269 
1270   Level: developer
1271 
1272   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1273 
1274 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1275 @*/
1276 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1277 {
1278   DM_Plex *mesh = (DM_Plex *) dm->data;
1279 
1280   PetscFunctionBegin;
1281   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1282   PetscValidPointer(part, 2);
1283   *part = mesh->partitioner;
1284   PetscFunctionReturn(0);
1285 }
1286 
1287 #undef __FUNCT__
1288 #define __FUNCT__ "DMPlexSetPartitioner"
1289 /*@
1290   DMPlexSetPartitioner - Set the mesh partitioner
1291 
1292   logically collective on dm and part
1293 
1294   Input Parameters:
1295 + dm - The DM
1296 - part - The partitioner
1297 
1298   Level: developer
1299 
1300   Note: Any existing PetscPartitioner will be destroyed.
1301 
1302 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1303 @*/
1304 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1305 {
1306   DM_Plex       *mesh = (DM_Plex *) dm->data;
1307   PetscErrorCode ierr;
1308 
1309   PetscFunctionBegin;
1310   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1311   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1312   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1313   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1314   mesh->partitioner = part;
1315   PetscFunctionReturn(0);
1316 }
1317 
#undef __FUNCT__
#define __FUNCT__ "DMPlexMarkTreeClosure"
/* Add the closure of the tree parent of 'point' to the partition being built:
   every parent-closure point not yet marked in the bit array 'bt' is appended
   to 'segpart' and counted in '*partSize'. Recurses on each closure point so
   that ancestors of ancestors are captured as well. A point that is its own
   parent (no tree refinement) is a no-op. */
static PetscErrorCode DMPlexMarkTreeClosure(DM dm, PetscSegBuffer segpart, PetscBT bt, PetscInt point, PetscInt *partSize)
{
  PetscInt       parent, closureSize, *closure = NULL, i, pStart, pEnd;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
  if (parent == point) PetscFunctionReturn(0); /* no distinct tree parent */
  ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr);
  ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
  for (i = 0; i < closureSize; i++) {
    PetscInt cpoint = closure[2*i]; /* closure stores (point, orientation) pairs */

    /* PetscBTLookupSet marks and returns the previous value, so each point
       is appended at most once across the whole traversal */
    if (!PetscBTLookupSet(bt,cpoint-pStart)) {
      PetscInt *PETSC_RESTRICT pt;
      (*partSize)++;
      ierr = PetscSegBufferGetInts(segpart,1,&pt);CHKERRQ(ierr);
      *pt = cpoint;
    }
    ierr = DMPlexMarkTreeClosure(dm,segpart,bt,cpoint,partSize);CHKERRQ(ierr);
  }
  ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1344 
#undef __FUNCT__
#define __FUNCT__ "DMPlexCreatePartitionClosure"
/* Expand a partition of points into a partition of their transitive closures
   (plus tree-parent closures). For each target rank, the closure of every
   assigned point is collected without duplicates (tracked by a bit array),
   sorted, and packed; the output section gives the per-rank counts and the
   output IS the concatenated, per-rank-sorted point lists. */
PetscErrorCode DMPlexCreatePartitionClosure(DM dm, PetscSection pointSection, IS pointPartition, PetscSection *section, IS *partition)
{
  /* const PetscInt  height = 0; */
  const PetscInt *partArray;
  PetscInt       *allPoints, *packPoints;
  PetscInt        rStart, rEnd, rank, pStart, pEnd, newSize;
  PetscErrorCode  ierr;
  PetscBT         bt;       /* marks points already collected for the current rank */
  PetscSegBuffer  segpack,segpart; /* segpack: all ranks' points; segpart: current rank's points */

  PetscFunctionBegin;
  ierr = PetscSectionGetChart(pointSection, &rStart, &rEnd);CHKERRQ(ierr);
  ierr = ISGetIndices(pointPartition, &partArray);CHKERRQ(ierr);
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dm), section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(*section, rStart, rEnd);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr);
  ierr = PetscBTCreate(pEnd-pStart,&bt);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&segpack);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&segpart);CHKERRQ(ierr);
  for (rank = rStart; rank < rEnd; ++rank) {
    PetscInt partSize = 0, numPoints, offset, p, *PETSC_RESTRICT placePoints;

    ierr = PetscSectionGetDof(pointSection, rank, &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(pointSection, rank, &offset);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      PetscInt  point   = partArray[offset+p], closureSize, c;
      PetscInt *closure = NULL;

      /* TODO Include support for height > 0 case */
      ierr = DMPlexGetTransitiveClosure(dm, point, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      for (c=0; c<closureSize; c++) {
        PetscInt cpoint = closure[c*2]; /* closure stores (point, orientation) pairs */
        /* Collect each closure point once per rank; PetscBTLookupSet marks
           and returns the previous bit value */
        if (!PetscBTLookupSet(bt,cpoint-pStart)) {
          PetscInt *PETSC_RESTRICT pt;
          partSize++;
          ierr = PetscSegBufferGetInts(segpart,1,&pt);CHKERRQ(ierr);
          *pt = cpoint;
        }
        ierr = DMPlexMarkTreeClosure(dm,segpart,bt,cpoint,&partSize);CHKERRQ(ierr);
      }
      ierr = DMPlexRestoreTransitiveClosure(dm, point, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
    }
    ierr = PetscSectionSetDof(*section, rank, partSize);CHKERRQ(ierr);
    /* Move this rank's (sorted) points from segpart into the packed buffer,
       then clear their bits so the next rank starts fresh */
    ierr = PetscSegBufferGetInts(segpack,partSize,&placePoints);CHKERRQ(ierr);
    ierr = PetscSegBufferExtractTo(segpart,placePoints);CHKERRQ(ierr);
    ierr = PetscSortInt(partSize,placePoints);CHKERRQ(ierr);
    for (p=0; p<partSize; p++) {ierr = PetscBTClear(bt,placePoints[p]-pStart);CHKERRQ(ierr);}
  }
  ierr = PetscBTDestroy(&bt);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&segpart);CHKERRQ(ierr);

  ierr = PetscSectionSetUp(*section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(*section, &newSize);CHKERRQ(ierr);
  ierr = PetscMalloc1(newSize, &allPoints);CHKERRQ(ierr);

  /* Copy the packed per-rank point runs to the offsets computed by the section */
  ierr = PetscSegBufferExtractInPlace(segpack,&packPoints);CHKERRQ(ierr);
  for (rank = rStart; rank < rEnd; ++rank) {
    PetscInt numPoints, offset;

    ierr = PetscSectionGetDof(*section, rank, &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(*section, rank, &offset);CHKERRQ(ierr);
    ierr = PetscMemcpy(&allPoints[offset], packPoints, numPoints * sizeof(PetscInt));CHKERRQ(ierr);
    packPoints += numPoints;
  }

  ierr = PetscSegBufferDestroy(&segpack);CHKERRQ(ierr);
  ierr = ISRestoreIndices(pointPartition, &partArray);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), newSize, allPoints, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1417 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelClosure"
/*@
  DMPlexPartitionLabelClosure - Add the closure of all points to the partition label

  Input Parameters:
+ dm     - The DM
- label  - DMLabel assigning ranks to remote roots

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, c, closureSize;
  PetscInt       *closure = NULL;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* For every rank appearing in the label, mark every point in the
     transitive closure of each already-labeled point with that rank */
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];

    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* The closure work array is reused across iterations (closure stays
         non-NULL) and restored once after all loops */
      ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      /* closure holds (point, orientation) pairs; step by 2 over the points */
      for (c = 0; c < closureSize*2; c += 2) {ierr = DMLabelSetValue(label, closure[c], rank);CHKERRQ(ierr);}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);}
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1461 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelAdjacency"
/*@
  DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label

  Input Parameters:
+ dm     - The DM
- label  - DMLabel assigning ranks to remote roots

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, a, adjSize;
  PetscInt       *adj = NULL;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* For every rank in the label, mark every point adjacent to an
     already-labeled point with that rank */
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];

    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* PETSC_DETERMINE asks DMPlexGetAdjacency to (re)size the adj array;
         the buffer is reused across iterations and freed once at the end */
      adjSize = PETSC_DETERMINE;
      ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
      for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  ierr = PetscFree(adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1506 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelInvert"
/*@
  DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label

  Input Parameters:
+ dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots

  Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
  resulting leafLabel is a receiver mapping of remote roots to their parent rank.

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, numProcs;
  PetscInt           p, n, numNeighbors, size, l, nleaves;
  PetscSF            sfPoint;
  PetscSFNode       *rootPoints, *leafPoints;
  PetscSection       rootSection, leafSection;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners */
  /* rootSection is chartered over ranks: dof at rank q = number of points we send to q */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, numProcs);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(size, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  /* Fill rootPoints, translating each labeled point to its true owner: if the
     point is a leaf in the point SF, record the remote (owner rank, index);
     otherwise it is locally owned and we record (point, rank) */
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* local == NULL means leaves are contiguous, so no leaf lookup is possible here */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap */
  /* NOTE(review): a PetscSFNode is a pair of PetscInts, hence MPIU_2INT as the unit type */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Filter remote contributions (ovLeafPoints) into the overlapSF */
  ierr = PetscSectionGetStorageSize(leafSection, &size);CHKERRQ(ierr);
  /* Received (point, rank) pairs become entries of the inverted label */
  for (p = 0; p < size; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1596 
1597 #undef __FUNCT__
1598 #define __FUNCT__ "DMPlexPartitionLabelCreateSF"
1599 /*@
1600   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
1601 
1602   Input Parameters:
1603 + dm    - The DM
1604 . label - DMLabel assinging ranks to remote roots
1605 
1606   Output Parameter:
1607 - sf    - The star forest communication context encapsulating the defined mapping
1608 
1609   Note: The incoming label is a receiver mapping of remote points to their parent rank.
1610 
1611   Level: developer
1612 
1613 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
1614 @*/
1615 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
1616 {
1617   PetscMPIInt     rank, numProcs;
1618   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
1619   PetscSFNode    *remotePoints;
1620   IS              remoteRootIS;
1621   const PetscInt *remoteRoots;
1622   PetscErrorCode ierr;
1623 
1624   PetscFunctionBegin;
1625   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1626   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
1627 
1628   for (numRemote = 0, n = 0; n < numProcs; ++n) {
1629     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1630     numRemote += numPoints;
1631   }
1632   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
1633   /* Put owned points first */
1634   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
1635   if (numPoints > 0) {
1636     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
1637     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1638     for (p = 0; p < numPoints; p++) {
1639       remotePoints[idx].index = remoteRoots[p];
1640       remotePoints[idx].rank = rank;
1641       idx++;
1642     }
1643     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1644     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1645   }
1646   /* Now add remote points */
1647   for (n = 0; n < numProcs; ++n) {
1648     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1649     if (numPoints <= 0 || n == rank) continue;
1650     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
1651     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1652     for (p = 0; p < numPoints; p++) {
1653       remotePoints[idx].index = remoteRoots[p];
1654       remotePoints[idx].rank = n;
1655       idx++;
1656     }
1657     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1658     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1659   }
1660   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
1661   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1662   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1663   PetscFunctionReturn(0);
1664 }
1665