xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 909a08a465b1d1e590479744cf0e80556843dde1)
1 #include <petsc-private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
/* Class id for PetscPartitioner objects; assigned during package registration */
PetscClassId PETSCPARTITIONER_CLASSID = 0;

/* Global registry of PetscPartitioner constructors, populated by PetscPartitionerRegister() */
PetscFunctionList PetscPartitionerList              = NULL;
/* Set to PETSC_TRUE once PetscPartitionerRegisterAll() has filled the registry */
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
7 
8 PetscBool ChacoPartitionercite = PETSC_FALSE;
9 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
10                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
11                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
12                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
13                                "  isbn      = {0-89791-816-9},\n"
14                                "  pages     = {28},\n"
15                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
16                                "  publisher = {ACM Press},\n"
17                                "  address   = {New York},\n"
18                                "  year      = {1995}\n}\n";
19 
PetscBool ParMetisPartitionercite = PETSC_FALSE;
/* BibTeX entry registered as the citation for the ParMetis partitioner */
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";
28 
29 #undef __FUNCT__
30 #define __FUNCT__ "DMPlexCreatePartitionerGraph"
31 /*@C
32   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
33 
34   Input Parameters:
35 + dm      - The mesh DM dm
36 - height  - Height of the strata from which to construct the graph
37 
38   Output Parameter:
39 + numVertices - Number of vertices in the graph
40 - offsets     - Point offsets in the graph
41 - adjacency   - Point connectivity in the graph
42 
43   The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
44   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
45   representation on the mesh.
46 
47   Level: developer
48 
49 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
50 @*/
51 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
52 {
53   PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
54   PetscInt      *adj = NULL, *vOffsets = NULL;
55   IS             cellNumbering;
56   const PetscInt *cellNum;
57   PetscBool      useCone, useClosure;
58   PetscSection   section;
59   PetscSegBuffer adjBuffer;
60   PetscSF        sfPoint;
61   PetscMPIInt    rank;
62   PetscErrorCode ierr;
63 
64   PetscFunctionBegin;
65   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
66   ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
67   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
68   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
69   /* Build adjacency graph via a section/segbuffer */
70   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
71   ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
72   ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
73   /* Always use FVM adjacency to create partitioner graph */
74   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
75   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
76   ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
77   ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
78   if (nroots > 0) {
79     ierr = DMPlexGetCellNumbering(dm, &cellNumbering);CHKERRQ(ierr);
80     ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
81   }
82   for (*numVertices = 0, p = pStart; p < pEnd; p++) {
83     /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
84     if (nroots > 0) {if (cellNum[p] < 0) continue;}
85     adjSize = PETSC_DETERMINE;
86     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
87     for (a = 0; a < adjSize; ++a) {
88       const PetscInt point = adj[a];
89       if (point != p && pStart <= point && point < pEnd) {
90         PetscInt *PETSC_RESTRICT pBuf;
91         ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
92         ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
93         *pBuf = point;
94       }
95     }
96     (*numVertices)++;
97   }
98   ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
99   ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
100   /* Derive CSR graph from section/segbuffer */
101   ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
102   ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
103   ierr = PetscMalloc2(*numVertices+1, &vOffsets, size, adjacency);CHKERRQ(ierr);
104   for (idx = 0, p = pStart; p < pEnd; p++) {
105     if (nroots > 0) {if (cellNum[p] < 0) continue;}
106     ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
107   }
108   vOffsets[*numVertices] = size;
109   if (offsets) *offsets = vOffsets;
110   if (adjacency) {
111     ierr = PetscSegBufferExtractTo(adjBuffer, *adjacency);CHKERRQ(ierr);
112     if (nroots > 0) {
113       ISLocalToGlobalMapping ltogCells;
114       PetscInt n, size, *cells_arr;
115       /* In parallel, apply a global cell numbering to the graph */
116       ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
117       ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
118       ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
119       ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
120       /* Convert to positive global cell numbers */
121       for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
122       ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
123       ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], *adjacency, *adjacency);CHKERRQ(ierr);
124       ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
125     }
126   }
127   /* Clean up */
128   ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
129   ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
130   ierr = PetscFree(adj);CHKERRQ(ierr);
131   PetscFunctionReturn(0);
132 }
133 
134 #undef __FUNCT__
135 #define __FUNCT__ "DMPlexCreateNeighborCSR"
136 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
137 {
138   const PetscInt maxFaceCases = 30;
139   PetscInt       numFaceCases = 0;
140   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
141   PetscInt      *off, *adj;
142   PetscInt      *neighborCells = NULL;
143   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
144   PetscErrorCode ierr;
145 
146   PetscFunctionBegin;
147   /* For parallel partitioning, I think you have to communicate supports */
148   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
149   cellDim = dim - cellHeight;
150   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
151   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
152   if (cEnd - cStart == 0) {
153     if (numVertices) *numVertices = 0;
154     if (offsets)   *offsets   = NULL;
155     if (adjacency) *adjacency = NULL;
156     PetscFunctionReturn(0);
157   }
158   numCells  = cEnd - cStart;
159   faceDepth = depth - cellHeight;
160   if (dim == depth) {
161     PetscInt f, fStart, fEnd;
162 
163     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
164     /* Count neighboring cells */
165     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
166     for (f = fStart; f < fEnd; ++f) {
167       const PetscInt *support;
168       PetscInt        supportSize;
169       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
170       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
171       if (supportSize == 2) {
172         PetscInt numChildren;
173 
174         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
175         if (!numChildren) {
176           ++off[support[0]-cStart+1];
177           ++off[support[1]-cStart+1];
178         }
179       }
180     }
181     /* Prefix sum */
182     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
183     if (adjacency) {
184       PetscInt *tmp;
185 
186       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
187       ierr = PetscMalloc1((numCells+1), &tmp);CHKERRQ(ierr);
188       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
189       /* Get neighboring cells */
190       for (f = fStart; f < fEnd; ++f) {
191         const PetscInt *support;
192         PetscInt        supportSize;
193         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
194         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
195         if (supportSize == 2) {
196           PetscInt numChildren;
197 
198           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
199           if (!numChildren) {
200             adj[tmp[support[0]-cStart]++] = support[1];
201             adj[tmp[support[1]-cStart]++] = support[0];
202           }
203         }
204       }
205 #if defined(PETSC_USE_DEBUG)
206       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
207 #endif
208       ierr = PetscFree(tmp);CHKERRQ(ierr);
209     }
210     if (numVertices) *numVertices = numCells;
211     if (offsets)   *offsets   = off;
212     if (adjacency) *adjacency = adj;
213     PetscFunctionReturn(0);
214   }
215   /* Setup face recognition */
216   if (faceDepth == 1) {
217     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
218 
219     for (c = cStart; c < cEnd; ++c) {
220       PetscInt corners;
221 
222       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
223       if (!cornersSeen[corners]) {
224         PetscInt nFV;
225 
226         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
227         cornersSeen[corners] = 1;
228 
229         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
230 
231         numFaceVertices[numFaceCases++] = nFV;
232       }
233     }
234   }
235   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
236   /* Count neighboring cells */
237   for (cell = cStart; cell < cEnd; ++cell) {
238     PetscInt numNeighbors = PETSC_DETERMINE, n;
239 
240     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
241     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
242     for (n = 0; n < numNeighbors; ++n) {
243       PetscInt        cellPair[2];
244       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
245       PetscInt        meetSize = 0;
246       const PetscInt *meet    = NULL;
247 
248       cellPair[0] = cell; cellPair[1] = neighborCells[n];
249       if (cellPair[0] == cellPair[1]) continue;
250       if (!found) {
251         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
252         if (meetSize) {
253           PetscInt f;
254 
255           for (f = 0; f < numFaceCases; ++f) {
256             if (numFaceVertices[f] == meetSize) {
257               found = PETSC_TRUE;
258               break;
259             }
260           }
261         }
262         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
263       }
264       if (found) ++off[cell-cStart+1];
265     }
266   }
267   /* Prefix sum */
268   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
269 
270   if (adjacency) {
271     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
272     /* Get neighboring cells */
273     for (cell = cStart; cell < cEnd; ++cell) {
274       PetscInt numNeighbors = PETSC_DETERMINE, n;
275       PetscInt cellOffset   = 0;
276 
277       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
278       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
279       for (n = 0; n < numNeighbors; ++n) {
280         PetscInt        cellPair[2];
281         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
282         PetscInt        meetSize = 0;
283         const PetscInt *meet    = NULL;
284 
285         cellPair[0] = cell; cellPair[1] = neighborCells[n];
286         if (cellPair[0] == cellPair[1]) continue;
287         if (!found) {
288           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
289           if (meetSize) {
290             PetscInt f;
291 
292             for (f = 0; f < numFaceCases; ++f) {
293               if (numFaceVertices[f] == meetSize) {
294                 found = PETSC_TRUE;
295                 break;
296               }
297             }
298           }
299           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
300         }
301         if (found) {
302           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
303           ++cellOffset;
304         }
305       }
306     }
307   }
308   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
309   if (numVertices) *numVertices = numCells;
310   if (offsets)   *offsets   = off;
311   if (adjacency) *adjacency = adj;
312   PetscFunctionReturn(0);
313 }
314 
315 #undef __FUNCT__
316 #define __FUNCT__ "DMPlexEnlargePartition"
317 /* Expand the partition by BFS on the adjacency graph */
318 PetscErrorCode DMPlexEnlargePartition(DM dm, const PetscInt start[], const PetscInt adjacency[], PetscSection origPartSection, IS origPartition, PetscSection partSection, IS *partition)
319 {
320   PetscHashI      h;
321   const PetscInt *points;
322   PetscInt      **tmpPoints, *newPoints, totPoints = 0;
323   PetscInt        pStart, pEnd, part, q;
324   PetscBool       useCone;
325   PetscErrorCode  ierr;
326 
327   PetscFunctionBegin;
328   PetscHashICreate(h);
329   ierr = PetscSectionGetChart(origPartSection, &pStart, &pEnd);CHKERRQ(ierr);
330   ierr = PetscSectionSetChart(partSection, pStart, pEnd);CHKERRQ(ierr);
331   ierr = ISGetIndices(origPartition, &points);CHKERRQ(ierr);
332   ierr = PetscMalloc1((pEnd - pStart), &tmpPoints);CHKERRQ(ierr);
333   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
334   ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
335   for (part = pStart; part < pEnd; ++part) {
336     PetscInt *adj = NULL;
337     PetscInt  numPoints, nP, numNewPoints, off, p, n = 0;
338 
339     PetscHashIClear(h);
340     ierr = PetscSectionGetDof(origPartSection, part, &numPoints);CHKERRQ(ierr);
341     ierr = PetscSectionGetOffset(origPartSection, part, &off);CHKERRQ(ierr);
342     /* Add all existing points to h */
343     for (p = 0; p < numPoints; ++p) {
344       const PetscInt point = points[off+p];
345       PetscHashIAdd(h, point, 1);
346     }
347     PetscHashISize(h, nP);
348     if (nP != numPoints) SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONG, "Invalid partition has %d points, but only %d were unique", numPoints, nP);
349     /* Add all points in next BFS level */
350     for (p = 0; p < numPoints; ++p) {
351       const PetscInt point   = points[off+p];
352       PetscInt       adjSize = PETSC_DETERMINE, a;
353 
354       ierr = DMPlexGetAdjacency(dm, point, &adjSize, &adj);CHKERRQ(ierr);
355       for (a = 0; a < adjSize; ++a) PetscHashIAdd(h, adj[a], 1);
356     }
357     PetscHashISize(h, numNewPoints);
358     ierr = PetscSectionSetDof(partSection, part, numNewPoints);CHKERRQ(ierr);
359     ierr = PetscMalloc1(numNewPoints, &tmpPoints[part]);CHKERRQ(ierr);
360     ierr = PetscHashIGetKeys(h, &n, tmpPoints[part]);CHKERRQ(ierr);
361     ierr = PetscFree(adj);CHKERRQ(ierr);
362     totPoints += numNewPoints;
363   }
364   ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
365   ierr = ISRestoreIndices(origPartition, &points);CHKERRQ(ierr);
366   PetscHashIDestroy(h);
367   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
368   ierr = PetscMalloc1(totPoints, &newPoints);CHKERRQ(ierr);
369   for (part = pStart, q = 0; part < pEnd; ++part) {
370     PetscInt numPoints, p;
371 
372     ierr = PetscSectionGetDof(partSection, part, &numPoints);CHKERRQ(ierr);
373     for (p = 0; p < numPoints; ++p, ++q) newPoints[q] = tmpPoints[part][p];
374     ierr = PetscFree(tmpPoints[part]);CHKERRQ(ierr);
375   }
376   ierr = PetscFree(tmpPoints);CHKERRQ(ierr);
377   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), totPoints, newPoints, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
378   PetscFunctionReturn(0);
379 }
380 
381 #undef __FUNCT__
382 #define __FUNCT__ "PetscPartitionerRegister"
383 /*@C
384   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
385 
386   Not Collective
387 
388   Input Parameters:
389 + name        - The name of a new user-defined creation routine
390 - create_func - The creation routine itself
391 
392   Notes:
393   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
394 
395   Sample usage:
396 .vb
397     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
398 .ve
399 
400   Then, your PetscPartitioner type can be chosen with the procedural interface via
401 .vb
402     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
403     PetscPartitionerSetType(PetscPartitioner, "my_part");
404 .ve
405    or at runtime via the option
406 .vb
407     -petscpartitioner_type my_part
408 .ve
409 
410   Level: advanced
411 
412 .keywords: PetscPartitioner, register
413 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
414 
415 @*/
416 PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
417 {
418   PetscErrorCode ierr;
419 
420   PetscFunctionBegin;
421   ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
422   PetscFunctionReturn(0);
423 }
424 
425 #undef __FUNCT__
426 #define __FUNCT__ "PetscPartitionerSetType"
427 /*@C
428   PetscPartitionerSetType - Builds a particular PetscPartitioner
429 
430   Collective on PetscPartitioner
431 
432   Input Parameters:
433 + part - The PetscPartitioner object
434 - name - The kind of partitioner
435 
436   Options Database Key:
437 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
438 
439   Level: intermediate
440 
441 .keywords: PetscPartitioner, set, type
442 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
443 @*/
444 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
445 {
446   PetscErrorCode (*r)(PetscPartitioner);
447   PetscBool      match;
448   PetscErrorCode ierr;
449 
450   PetscFunctionBegin;
451   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
452   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
453   if (match) PetscFunctionReturn(0);
454 
455   if (!PetscPartitionerRegisterAllCalled) {ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);}
456   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
457   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
458 
459   if (part->ops->destroy) {
460     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
461     part->ops->destroy = NULL;
462   }
463   ierr = (*r)(part);CHKERRQ(ierr);
464   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
465   PetscFunctionReturn(0);
466 }
467 
468 #undef __FUNCT__
469 #define __FUNCT__ "PetscPartitionerGetType"
470 /*@C
471   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
472 
473   Not Collective
474 
475   Input Parameter:
476 . part - The PetscPartitioner
477 
478   Output Parameter:
479 . name - The PetscPartitioner type name
480 
481   Level: intermediate
482 
483 .keywords: PetscPartitioner, get, type, name
484 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
485 @*/
486 PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
487 {
488   PetscErrorCode ierr;
489 
490   PetscFunctionBegin;
491   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
492   PetscValidPointer(name, 2);
493   if (!PetscPartitionerRegisterAllCalled) {ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);}
494   *name = ((PetscObject) part)->type_name;
495   PetscFunctionReturn(0);
496 }
497 
498 #undef __FUNCT__
499 #define __FUNCT__ "PetscPartitionerView"
500 /*@C
501   PetscPartitionerView - Views a PetscPartitioner
502 
503   Collective on PetscPartitioner
504 
505   Input Parameter:
506 + part - the PetscPartitioner object to view
507 - v    - the viewer
508 
509   Level: developer
510 
511 .seealso: PetscPartitionerDestroy()
512 @*/
513 PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
514 {
515   PetscErrorCode ierr;
516 
517   PetscFunctionBegin;
518   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
519   if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
520   if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
521   PetscFunctionReturn(0);
522 }
523 
524 #undef __FUNCT__
525 #define __FUNCT__ "PetscPartitionerViewFromOptions"
526 /*
527   PetscPartitionerViewFromOptions - Processes command line options to determine if/how a PetscPartitioner is to be viewed.
528 
529   Collective on PetscPartitioner
530 
531   Input Parameters:
532 + part   - the PetscPartitioner
533 . prefix - prefix to use for viewing, or NULL
534 - optionname - option to activate viewing
535 
536   Level: intermediate
537 
538 .keywords: PetscPartitioner, view, options, database
539 .seealso: VecViewFromOptions(), MatViewFromOptions()
540 */
541 PetscErrorCode PetscPartitionerViewFromOptions(PetscPartitioner part, const char prefix[], const char optionname[])
542 {
543   PetscViewer       viewer;
544   PetscViewerFormat format;
545   PetscBool         flg;
546   PetscErrorCode    ierr;
547 
548   PetscFunctionBegin;
549   if (prefix) {ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject) part), prefix,                       optionname, &viewer, &format, &flg);CHKERRQ(ierr);}
550   else        {ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject) part), ((PetscObject) part)->prefix, optionname, &viewer, &format, &flg);CHKERRQ(ierr);}
551   if (flg) {
552     ierr = PetscViewerPushFormat(viewer, format);CHKERRQ(ierr);
553     ierr = PetscPartitionerView(part, viewer);CHKERRQ(ierr);
554     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
555     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
556   }
557   PetscFunctionReturn(0);
558 }
559 #undef __FUNCT__
560 #define __FUNCT__ "PetscPartitionerSetTypeFromOptions_Internal"
561 PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
562 {
563   const char    *defaultType;
564   char           name[256];
565   PetscBool      flg;
566   PetscErrorCode ierr;
567 
568   PetscFunctionBegin;
569   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
570   if (!((PetscObject) part)->type_name) defaultType = PETSCPARTITIONERCHACO;
571   else                                  defaultType = ((PetscObject) part)->type_name;
572   if (!PetscPartitionerRegisterAllCalled) {ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);}
573 
574   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
575   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);CHKERRQ(ierr);
576   if (flg) {
577     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
578   } else if (!((PetscObject) part)->type_name) {
579     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
580   }
581   ierr = PetscOptionsEnd();CHKERRQ(ierr);
582   PetscFunctionReturn(0);
583 }
584 
585 #undef __FUNCT__
586 #define __FUNCT__ "PetscPartitionerSetFromOptions"
587 /*@
588   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
589 
590   Collective on PetscPartitioner
591 
592   Input Parameter:
593 . part - the PetscPartitioner object to set options for
594 
595   Level: developer
596 
597 .seealso: PetscPartitionerView()
598 @*/
599 PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
600 {
601   PetscErrorCode ierr;
602 
603   PetscFunctionBegin;
604   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
605   ierr = PetscPartitionerSetTypeFromOptions_Internal(part);CHKERRQ(ierr);
606 
607   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
608   if (part->ops->setfromoptions) {ierr = (*part->ops->setfromoptions)(part);CHKERRQ(ierr);}
609   /* process any options handlers added with PetscObjectAddOptionsHandler() */
610   ierr = PetscObjectProcessOptionsHandlers((PetscObject) part);CHKERRQ(ierr);
611   ierr = PetscOptionsEnd();CHKERRQ(ierr);
612   ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
613   PetscFunctionReturn(0);
614 }
615 
616 #undef __FUNCT__
617 #define __FUNCT__ "PetscPartitionerSetUp"
618 /*@C
619   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
620 
621   Collective on PetscPartitioner
622 
623   Input Parameter:
624 . part - the PetscPartitioner object to setup
625 
626   Level: developer
627 
628 .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
629 @*/
630 PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
631 {
632   PetscErrorCode ierr;
633 
634   PetscFunctionBegin;
635   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
636   if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
637   PetscFunctionReturn(0);
638 }
639 
640 #undef __FUNCT__
641 #define __FUNCT__ "PetscPartitionerDestroy"
642 /*@
643   PetscPartitionerDestroy - Destroys a PetscPartitioner object
644 
645   Collective on PetscPartitioner
646 
647   Input Parameter:
648 . part - the PetscPartitioner object to destroy
649 
650   Level: developer
651 
652 .seealso: PetscPartitionerView()
653 @*/
654 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
655 {
656   PetscErrorCode ierr;
657 
658   PetscFunctionBegin;
659   if (!*part) PetscFunctionReturn(0);
660   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
661 
662   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
663   ((PetscObject) (*part))->refct = 0;
664 
665   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
666   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
667   PetscFunctionReturn(0);
668 }
669 
670 #undef __FUNCT__
671 #define __FUNCT__ "PetscPartitionerCreate"
672 /*@
673   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
674 
675   Collective on MPI_Comm
676 
677   Input Parameter:
678 . comm - The communicator for the PetscPartitioner object
679 
680   Output Parameter:
681 . part - The PetscPartitioner object
682 
683   Level: beginner
684 
685 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL
686 @*/
687 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
688 {
689   PetscPartitioner p;
690   PetscErrorCode   ierr;
691 
692   PetscFunctionBegin;
693   PetscValidPointer(part, 2);
694   *part = NULL;
695   ierr = PetscFVInitializePackage();CHKERRQ(ierr);
696 
697   ierr = PetscHeaderCreate(p, _p_PetscPartitioner, struct _PetscPartitionerOps, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
698   ierr = PetscMemzero(p->ops, sizeof(struct _PetscPartitionerOps));CHKERRQ(ierr);
699 
700   *part = p;
701   PetscFunctionReturn(0);
702 }
703 
704 #undef __FUNCT__
705 #define __FUNCT__ "PetscPartitionerPartition"
706 /*@
707   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
708 
709   Collective on DM
710 
711   Input Parameters:
712 + part    - The PetscPartitioner
713 - dm      - The mesh DM
714 
715   Output Parameters:
716 + partSection     - The PetscSection giving the division of points by partition
717 - partition       - The list of points by partition
718 
719   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
720 
721   Level: developer
722 
723 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
724 @*/
725 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
726 {
727   PetscMPIInt    size;
728   PetscErrorCode ierr;
729 
730   PetscFunctionBegin;
731   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
732   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
733   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
734   PetscValidPointer(partition, 5);
735   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
736   if (size == 1) {
737     PetscInt *points;
738     PetscInt  cStart, cEnd, c;
739 
740     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
741     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
742     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
743     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
744     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
745     for (c = cStart; c < cEnd; ++c) points[c] = c;
746     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
747     PetscFunctionReturn(0);
748   }
749   if (part->height == 0) {
750     PetscInt  numVertices;
751     PetscInt *start     = NULL;
752     PetscInt *adjacency = NULL;
753 
754     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency);CHKERRQ(ierr);
755     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
756     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
757     ierr = PetscFree(start);CHKERRQ(ierr);
758     ierr = PetscFree(adjacency);CHKERRQ(ierr);
759   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
760   PetscFunctionReturn(0);
761 }
762 
763 #undef __FUNCT__
764 #define __FUNCT__ "PetscPartitionerDestroy_Shell"
765 PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
766 {
767   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
768   PetscErrorCode          ierr;
769 
770   PetscFunctionBegin;
771   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
772   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
773   ierr = PetscFree(p);CHKERRQ(ierr);
774   PetscFunctionReturn(0);
775 }
776 
777 #undef __FUNCT__
778 #define __FUNCT__ "PetscPartitionerView_Shell_Ascii"
779 PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
780 {
781   PetscViewerFormat format;
782   PetscErrorCode    ierr;
783 
784   PetscFunctionBegin;
785   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
786   ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
787   PetscFunctionReturn(0);
788 }
789 
790 #undef __FUNCT__
791 #define __FUNCT__ "PetscPartitionerView_Shell"
792 PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
793 {
794   PetscBool      iascii;
795   PetscErrorCode ierr;
796 
797   PetscFunctionBegin;
798   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
799   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
800   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
801   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
802   PetscFunctionReturn(0);
803 }
804 
805 #undef __FUNCT__
806 #define __FUNCT__ "PetscPartitionerPartition_Shell"
807 PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
808 {
809   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
810   PetscInt                np;
811   PetscErrorCode          ierr;
812 
813   PetscFunctionBegin;
814   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
815   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
816   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
817   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
818   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
819   *partition = p->partition;
820   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
821   PetscFunctionReturn(0);
822 }
823 
824 #undef __FUNCT__
825 #define __FUNCT__ "PetscPartitionerInitialize_Shell"
/* Install the shell implementation's methods on the base partitioner object */
PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view      = PetscPartitionerView_Shell;
  part->ops->destroy   = PetscPartitionerDestroy_Shell;
  part->ops->partition = PetscPartitionerPartition_Shell;
  PetscFunctionReturn(0);
}
834 
835 /*MC
836   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
837 
838   Level: intermediate
839 
840 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
841 M*/
842 
843 #undef __FUNCT__
844 #define __FUNCT__ "PetscPartitionerCreate_Shell"
845 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
846 {
847   PetscPartitioner_Shell *p;
848   PetscErrorCode          ierr;
849 
850   PetscFunctionBegin;
851   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
852   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
853   part->data = p;
854 
855   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
856   PetscFunctionReturn(0);
857 }
858 
859 #undef __FUNCT__
860 #define __FUNCT__ "PetscPartitionerShellSetPartition"
861 /*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
863 
864   Collective on PART
865 
866   Input Parameters:
867 + part     - The PetscPartitioner
868 . numProcs - The number of partitions
869 . sizes    - array of size numProcs (or NULL) providing the number of points in each partition
870 - points   - array of size sum(sizes) (may be NULL iff sizes is NULL) providing the partition each point belongs to
871 
872   Level: developer
873 
874   Notes:
875 
876     It is safe to free the sizes and points arrays after use in this routine.
877 
878 .seealso DMPlexDistribute(), PetscPartitionerCreate()
879 @*/
880 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt numProcs, const PetscInt sizes[], const PetscInt points[])
881 {
882   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
883   PetscInt                proc, numPoints;
884   PetscErrorCode          ierr;
885 
886   PetscFunctionBegin;
887   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
888   if (sizes) {PetscValidPointer(sizes, 3);}
889   if (sizes) {PetscValidPointer(points, 4);}
890   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
891   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
892   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
893   ierr = PetscSectionSetChart(p->section, 0, numProcs);CHKERRQ(ierr);
894   if (sizes) {
895     for (proc = 0; proc < numProcs; ++proc) {
896       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
897     }
898   }
899   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
900   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
901   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
902   PetscFunctionReturn(0);
903 }
904 
905 #undef __FUNCT__
906 #define __FUNCT__ "PetscPartitionerDestroy_Chaco"
907 PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
908 {
909   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
910   PetscErrorCode          ierr;
911 
912   PetscFunctionBegin;
913   ierr = PetscFree(p);CHKERRQ(ierr);
914   PetscFunctionReturn(0);
915 }
916 
917 #undef __FUNCT__
918 #define __FUNCT__ "PetscPartitionerView_Chaco_Ascii"
919 PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
920 {
921   PetscViewerFormat format;
922   PetscErrorCode    ierr;
923 
924   PetscFunctionBegin;
925   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
926   ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
927   PetscFunctionReturn(0);
928 }
929 
930 #undef __FUNCT__
931 #define __FUNCT__ "PetscPartitionerView_Chaco"
932 PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
933 {
934   PetscBool      iascii;
935   PetscErrorCode ierr;
936 
937   PetscFunctionBegin;
938   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
939   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
940   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
941   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
942   PetscFunctionReturn(0);
943 }
944 
945 #if defined(PETSC_HAVE_CHACO)
946 #if defined(PETSC_HAVE_UNISTD_H)
947 #include <unistd.h>
948 #endif
949 /* Chaco does not have an include file */
950 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
951                        float *ewgts, float *x, float *y, float *z, char *outassignname,
952                        char *outfilename, short *assignment, int architecture, int ndims_tot,
953                        int mesh_dims[3], double *goal, int global_method, int local_method,
954                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
955 
956 extern int FREE_GRAPH;
957 #endif
958 
959 #undef __FUNCT__
960 #define __FUNCT__ "PetscPartitionerPartition_Chaco"
/* Partition the serial CSR graph (start/adjacency) into nparts pieces using the
   Chaco library. The result is returned as a PetscSection giving the number of
   vertices per partition plus an IS listing vertices grouped by partition. */
PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
  short int     *assignment;              /* Output partition */
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  /* Empty local graph: produce an empty partition and return early */
  if (!numVertices) {
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Shift adjacency to 1-based numbering for Chaco (undone after the call);
     note this temporarily mutates the caller's array in place */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  /* 1-D mesh of nparts processors */
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    /* Restore stdout and capture whatever Chaco printed; it is reported only
       if the interface() call above failed.
       NOTE(review): read() assumes Chaco's output fits the pipe buffer and
       that some data is available -- confirm there is no deadlock risk for
       very verbose runs. */
    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* Group vertex numbers by partition; quadratic in nvtxs but simple */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift applied before calling Chaco */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1064 
1065 #undef __FUNCT__
1066 #define __FUNCT__ "PetscPartitionerInitialize_Chaco"
/* Install the Chaco implementation's methods on the base partitioner object */
PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view      = PetscPartitionerView_Chaco;
  part->ops->destroy   = PetscPartitionerDestroy_Chaco;
  part->ops->partition = PetscPartitionerPartition_Chaco;
  PetscFunctionReturn(0);
}
1075 
1076 /*MC
1077   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1078 
1079   Level: intermediate
1080 
1081 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1082 M*/
1083 
1084 #undef __FUNCT__
1085 #define __FUNCT__ "PetscPartitionerCreate_Chaco"
1086 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1087 {
1088   PetscPartitioner_Chaco *p;
1089   PetscErrorCode          ierr;
1090 
1091   PetscFunctionBegin;
1092   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1093   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1094   part->data = p;
1095 
1096   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1097   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1098   PetscFunctionReturn(0);
1099 }
1100 
1101 #undef __FUNCT__
1102 #define __FUNCT__ "PetscPartitionerDestroy_ParMetis"
1103 PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1104 {
1105   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1106   PetscErrorCode             ierr;
1107 
1108   PetscFunctionBegin;
1109   ierr = PetscFree(p);CHKERRQ(ierr);
1110   PetscFunctionReturn(0);
1111 }
1112 
1113 #undef __FUNCT__
1114 #define __FUNCT__ "PetscPartitionerView_ParMetis_Ascii"
1115 PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1116 {
1117   PetscViewerFormat format;
1118   PetscErrorCode    ierr;
1119 
1120   PetscFunctionBegin;
1121   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1122   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
1123   PetscFunctionReturn(0);
1124 }
1125 
1126 #undef __FUNCT__
1127 #define __FUNCT__ "PetscPartitionerView_ParMetis"
1128 PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1129 {
1130   PetscBool      iascii;
1131   PetscErrorCode ierr;
1132 
1133   PetscFunctionBegin;
1134   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1135   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1136   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1137   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1138   PetscFunctionReturn(0);
1139 }
1140 
1141 #if defined(PETSC_HAVE_PARMETIS)
1142 #include <parmetis.h>
1143 #endif
1144 
1145 #undef __FUNCT__
1146 #define __FUNCT__ "PetscPartitionerPartition_ParMetis"
1147 PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1148 {
1149 #if defined(PETSC_HAVE_PARMETIS)
1150   MPI_Comm       comm;
1151   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1152   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1153   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1154   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1155   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1156   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1157   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1158   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1159   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1160   PetscReal     *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1161   PetscReal     *ubvec;                    /* The balance intolerance for vertex weights */
1162   PetscInt       options[5];               /* Options */
1163   /* Outputs */
1164   PetscInt       edgeCut;                  /* The number of edges cut by the partition */
1165   PetscInt      *assignment, *points;
1166   PetscMPIInt    rank, p, v, i;
1167   PetscErrorCode ierr;
1168 
1169   PetscFunctionBegin;
1170   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1171   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1172   options[0] = 0; /* Use all defaults */
1173   /* Calculate vertex distribution */
1174   ierr = PetscMalloc4(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment);CHKERRQ(ierr);
1175   vtxdist[0] = 0;
1176   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1177   for (p = 2; p <= nparts; ++p) {
1178     vtxdist[p] += vtxdist[p-1];
1179   }
1180   /* Calculate weights */
1181   for (p = 0; p < nparts; ++p) {
1182     tpwgts[p] = 1.0/nparts;
1183   }
1184   ubvec[0] = 1.05;
1185 
1186   if (nparts == 1) {
1187     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));
1188   } else {
1189     if (vtxdist[1] == vtxdist[nparts]) {
1190       if (!rank) {
1191         PetscStackPush("METIS_PartGraphKway");
1192         ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
1193         PetscStackPop;
1194         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1195       }
1196     } else {
1197       PetscStackPush("ParMETIS_V3_PartKway");
1198       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1199       PetscStackPop;
1200       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
1201     }
1202   }
1203   /* Convert to PetscSection+IS */
1204   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1205   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1206   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1207   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1208   for (p = 0, i = 0; p < nparts; ++p) {
1209     for (v = 0; v < nvtxs; ++v) {
1210       if (assignment[v] == p) points[i++] = v;
1211     }
1212   }
1213   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1214   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1215   ierr = PetscFree4(vtxdist,tpwgts,ubvec,assignment);CHKERRQ(ierr);
1216 #else
1217   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1218 #endif
1219   PetscFunctionReturn(0);
1220 }
1221 
1222 #undef __FUNCT__
1223 #define __FUNCT__ "PetscPartitionerInitialize_ParMetis"
/* Install the ParMetis implementation's methods on the base partitioner object */
PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view      = PetscPartitionerView_ParMetis;
  part->ops->destroy   = PetscPartitionerDestroy_ParMetis;
  part->ops->partition = PetscPartitionerPartition_ParMetis;
  PetscFunctionReturn(0);
}
1232 
1233 /*MC
1234   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1235 
1236   Level: intermediate
1237 
1238 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1239 M*/
1240 
1241 #undef __FUNCT__
1242 #define __FUNCT__ "PetscPartitionerCreate_ParMetis"
1243 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1244 {
1245   PetscPartitioner_ParMetis *p;
1246   PetscErrorCode          ierr;
1247 
1248   PetscFunctionBegin;
1249   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1250   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1251   part->data = p;
1252 
1253   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1254   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1255   PetscFunctionReturn(0);
1256 }
1257 
1258 #undef __FUNCT__
1259 #define __FUNCT__ "DMPlexGetPartitioner"
1260 /*@
1261   DMPlexGetPartitioner - Get the mesh partitioner
1262 
1263   Not collective
1264 
1265   Input Parameter:
1266 . dm - The DM
1267 
1268   Output Parameter:
1269 . part - The PetscPartitioner
1270 
1271   Level: developer
1272 
1273   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1274 
1275 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1276 @*/
1277 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1278 {
1279   DM_Plex *mesh = (DM_Plex *) dm->data;
1280 
1281   PetscFunctionBegin;
1282   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1283   PetscValidPointer(part, 2);
1284   *part = mesh->partitioner;
1285   PetscFunctionReturn(0);
1286 }
1287 
1288 #undef __FUNCT__
1289 #define __FUNCT__ "DMPlexSetPartitioner"
1290 /*@
1291   DMPlexSetPartitioner - Set the mesh partitioner
1292 
1293   logically collective on dm and part
1294 
1295   Input Parameters:
1296 + dm - The DM
1297 - part - The partitioner
1298 
1299   Level: developer
1300 
1301   Note: Any existing PetscPartitioner will be destroyed.
1302 
1303 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1304 @*/
1305 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1306 {
1307   DM_Plex       *mesh = (DM_Plex *) dm->data;
1308   PetscErrorCode ierr;
1309 
1310   PetscFunctionBegin;
1311   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1312   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1313   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1314   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1315   mesh->partitioner = part;
1316   PetscFunctionReturn(0);
1317 }
1318 
1319 #undef __FUNCT__
1320 #define __FUNCT__ "DMPlexMarkTreeClosure"
/* Append to segpart (and count in *partSize) every point in the transitive
   closure of 'point's tree parent that is not yet marked in bt, recursing so
   that ancestors of ancestors are included too. A point that is its own tree
   parent (i.e. unconstrained by the mesh tree) contributes nothing. */
static PetscErrorCode DMPlexMarkTreeClosure(DM dm, PetscSegBuffer segpart, PetscBT bt, PetscInt point, PetscInt *partSize)
{
  PetscInt       parent, closureSize, *closure = NULL, i, pStart, pEnd;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
  if (parent == point) PetscFunctionReturn(0);  /* no tree parent: nothing to mark */
  ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr);
  ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
  for (i = 0; i < closureSize; i++) {
    PetscInt cpoint = closure[2*i];  /* closure entries are (point, orientation) pairs */

    /* bt is indexed relative to the chart start; set-and-test in one call */
    if (!PetscBTLookupSet(bt,cpoint-pStart)) {
      PetscInt *PETSC_RESTRICT pt;
      (*partSize)++;
      ierr = PetscSegBufferGetInts(segpart,1,&pt);CHKERRQ(ierr);
      *pt = cpoint;
    }
    /* A closure point may itself be constrained; recurse on it */
    ierr = DMPlexMarkTreeClosure(dm,segpart,bt,cpoint,partSize);CHKERRQ(ierr);
  }
  ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1345 
1346 #undef __FUNCT__
1347 #define __FUNCT__ "DMPlexCreatePartitionClosure"
/* Expand a partition of points (pointSection/pointPartition, one chunk per
   target rank) to include the transitive closure of every point, plus any
   points required by the mesh tree. Produces a new section over the same rank
   range and an IS of the closed point sets, each rank's points sorted. */
PetscErrorCode DMPlexCreatePartitionClosure(DM dm, PetscSection pointSection, IS pointPartition, PetscSection *section, IS *partition)
{
  /* const PetscInt  height = 0; */
  const PetscInt *partArray;
  PetscInt       *allPoints, *packPoints;
  PetscInt        rStart, rEnd, rank, pStart, pEnd, newSize;
  PetscErrorCode  ierr;
  PetscBT         bt;
  PetscSegBuffer  segpack,segpart;

  PetscFunctionBegin;
  ierr = PetscSectionGetChart(pointSection, &rStart, &rEnd);CHKERRQ(ierr);
  ierr = ISGetIndices(pointPartition, &partArray);CHKERRQ(ierr);
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dm), section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(*section, rStart, rEnd);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr);
  /* bt records which points have been collected for the current rank,
     so closures that overlap are only counted once */
  ierr = PetscBTCreate(pEnd-pStart,&bt);CHKERRQ(ierr);
  /* segpart accumulates one rank's points; segpack accumulates all ranks' points */
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&segpack);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&segpart);CHKERRQ(ierr);
  for (rank = rStart; rank < rEnd; ++rank) {
    PetscInt partSize = 0, numPoints, offset, p, *PETSC_RESTRICT placePoints;

    ierr = PetscSectionGetDof(pointSection, rank, &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(pointSection, rank, &offset);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      PetscInt  point   = partArray[offset+p], closureSize, c;
      PetscInt *closure = NULL;

      /* TODO Include support for height > 0 case */
      ierr = DMPlexGetTransitiveClosure(dm, point, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      for (c=0; c<closureSize; c++) {
        PetscInt cpoint = closure[c*2];  /* (point, orientation) pairs */
        if (!PetscBTLookupSet(bt,cpoint-pStart)) {
          PetscInt *PETSC_RESTRICT pt;
          partSize++;
          ierr = PetscSegBufferGetInts(segpart,1,&pt);CHKERRQ(ierr);
          *pt = cpoint;
        }
        /* Also pull in points demanded by the mesh tree (ancestor closures) */
        ierr = DMPlexMarkTreeClosure(dm,segpart,bt,cpoint,&partSize);CHKERRQ(ierr);
      }
      ierr = DMPlexRestoreTransitiveClosure(dm, point, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
    }
    ierr = PetscSectionSetDof(*section, rank, partSize);CHKERRQ(ierr);
    /* Move this rank's (sorted) points into the pack buffer, then clear bt
       so the next rank starts from an empty marking */
    ierr = PetscSegBufferGetInts(segpack,partSize,&placePoints);CHKERRQ(ierr);
    ierr = PetscSegBufferExtractTo(segpart,placePoints);CHKERRQ(ierr);
    ierr = PetscSortInt(partSize,placePoints);CHKERRQ(ierr);
    for (p=0; p<partSize; p++) {ierr = PetscBTClear(bt,placePoints[p]-pStart);CHKERRQ(ierr);}
  }
  ierr = PetscBTDestroy(&bt);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&segpart);CHKERRQ(ierr);

  ierr = PetscSectionSetUp(*section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(*section, &newSize);CHKERRQ(ierr);
  ierr = PetscMalloc1(newSize, &allPoints);CHKERRQ(ierr);

  /* Copy each rank's chunk to its final offset in the output array */
  ierr = PetscSegBufferExtractInPlace(segpack,&packPoints);CHKERRQ(ierr);
  for (rank = rStart; rank < rEnd; ++rank) {
    PetscInt numPoints, offset;

    ierr = PetscSectionGetDof(*section, rank, &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(*section, rank, &offset);CHKERRQ(ierr);
    ierr = PetscMemcpy(&allPoints[offset], packPoints, numPoints * sizeof(PetscInt));CHKERRQ(ierr);
    packPoints += numPoints;
  }

  ierr = PetscSegBufferDestroy(&segpack);CHKERRQ(ierr);
  ierr = ISRestoreIndices(pointPartition, &partArray);CHKERRQ(ierr);
  /* The IS takes ownership of allPoints */
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), newSize, allPoints, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1418 
1419 #undef __FUNCT__
1420 #define __FUNCT__ "DMPlexPartitionLabelClosure"
1421 /*@
1422   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
1423 
1424   Input Parameters:
1425 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1427 
1428   Level: developer
1429 
1430 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1431 @*/
1432 PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
1433 {
1434   IS              rankIS,   pointIS;
1435   const PetscInt *ranks,   *points;
1436   PetscInt        numRanks, numPoints, r, p, c, closureSize;
1437   PetscInt       *closure = NULL;
1438   PetscErrorCode  ierr;
1439 
1440   PetscFunctionBegin;
1441   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1442   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1443   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1444   for (r = 0; r < numRanks; ++r) {
1445     const PetscInt rank = ranks[r];
1446 
1447     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1448     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1449     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1450     for (p = 0; p < numPoints; ++p) {
1451       ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
1452       for (c = 0; c < closureSize*2; c += 2) {ierr = DMLabelSetValue(label, closure[c], rank);CHKERRQ(ierr);}
1453     }
1454     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1455     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1456   }
1457   if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);}
1458   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1459   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1460   PetscFunctionReturn(0);
1461 }
1462 
1463 #undef __FUNCT__
1464 #define __FUNCT__ "DMPlexPartitionLabelAdjacency"
1465 /*@
1466   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
1467 
1468   Input Parameters:
1469 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1471 
1472   Level: developer
1473 
1474 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1475 @*/
1476 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
1477 {
1478   IS              rankIS,   pointIS;
1479   const PetscInt *ranks,   *points;
1480   PetscInt        numRanks, numPoints, r, p, a, adjSize;
1481   PetscInt       *adj = NULL;
1482   PetscErrorCode  ierr;
1483 
1484   PetscFunctionBegin;
1485   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1486   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1487   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1488   for (r = 0; r < numRanks; ++r) {
1489     const PetscInt rank = ranks[r];
1490 
1491     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1492     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1493     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1494     for (p = 0; p < numPoints; ++p) {
1495       adjSize = PETSC_DETERMINE;
1496       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
1497       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
1498     }
1499     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1500     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1501   }
1502   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1503   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1504   ierr = PetscFree(adj);CHKERRQ(ierr);
1505   PetscFunctionReturn(0);
1506 }
1507 
1508 #undef __FUNCT__
1509 #define __FUNCT__ "DMPlexPartitionLabelInvert"
1510 /*@
1511   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
1512 
  Input Parameters:
+ dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots
1520 
1521   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
1522   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
1523 
1524   Level: developer
1525 
1526 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1527 @*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, numProcs;
  PetscInt           p, n, numNeighbors, size, l, nleaves;
  PetscSF            sfPoint;
  PetscSFNode       *rootPoints, *leafPoints;
  PetscSection       rootSection, leafSection;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners: build a section over the
     rank chart [0, numProcs) counting how many labelled points are destined
     for each neighbor rank */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, numProcs);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(size, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  /* Fill the send buffer: for each labelled point record its true owner --
     if the point is a leaf of the point SF, take the (rank, index) of its
     root; otherwise the point is owned by this process */
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* NOTE(review): when local == NULL every point is treated as locally
         owned, although a NULL leaf array can also mean contiguous leaf
         numbering -- presumably the point SFs used here always carry explicit
         leaf indices; confirm */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap: distribute the (point, rank) pairs according to
     rootSection over processSF */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Record the received (point, rank) pairs in the leaf label */
  ierr = PetscSectionGetStorageSize(leafSection, &size);CHKERRQ(ierr);
  for (p = 0; p < size; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1597 
1598 #undef __FUNCT__
1599 #define __FUNCT__ "DMPlexPartitionLabelCreateSF"
1600 /*@
1601   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
1602 
  Input Parameters:
+ dm    - The DM
- label - DMLabel assigning ranks to remote roots

  Output Parameter:
. sf    - The star forest communication context encapsulating the defined mapping
1609 
1610   Note: The incoming label is a receiver mapping of remote points to their parent rank.
1611 
1612   Level: developer
1613 
1614 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
1615 @*/
1616 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
1617 {
1618   PetscMPIInt     rank, numProcs;
1619   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
1620   PetscSFNode    *remotePoints;
1621   IS              remoteRootIS;
1622   const PetscInt *remoteRoots;
1623   PetscErrorCode ierr;
1624 
1625   PetscFunctionBegin;
1626   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1627   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
1628 
1629   for (numRemote = 0, n = 0; n < numProcs; ++n) {
1630     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1631     numRemote += numPoints;
1632   }
1633   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
1634   /* Put owned points first */
1635   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
1636   if (numPoints > 0) {
1637     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
1638     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1639     for (p = 0; p < numPoints; p++) {
1640       remotePoints[idx].index = remoteRoots[p];
1641       remotePoints[idx].rank = rank;
1642       idx++;
1643     }
1644     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1645     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1646   }
1647   /* Now add remote points */
1648   for (n = 0; n < numProcs; ++n) {
1649     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1650     if (numPoints <= 0 || n == rank) continue;
1651     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
1652     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1653     for (p = 0; p < numPoints; p++) {
1654       remotePoints[idx].index = remoteRoots[p];
1655       remotePoints[idx].rank = n;
1656       idx++;
1657     }
1658     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1659     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1660   }
1661   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
1662   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1663   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1664   PetscFunctionReturn(0);
1665 }
1666