1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/hashseti.h>
3 
4 PetscClassId PETSCPARTITIONER_CLASSID = 0;
5 
6 PetscFunctionList PetscPartitionerList              = NULL;
7 PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
8 
9 PetscBool ChacoPartitionercite = PETSC_FALSE;
10 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
11                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
12                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
13                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},\n"
14                                "  isbn      = {0-89791-816-9},\n"
15                                "  pages     = {28},\n"
16                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
17                                "  publisher = {ACM Press},\n"
18                                "  address   = {New York},\n"
19                                "  year      = {1995}\n}\n";
20 
21 PetscBool ParMetisPartitionercite = PETSC_FALSE;
22 const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
23                                "  author  = {George Karypis and Vipin Kumar},\n"
24                                "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
25                                "  journal = {Journal of Parallel and Distributed Computing},\n"
26                                "  volume  = {48},\n"
27                                "  pages   = {71--85},\n"
28                                "  year    = {1998}\n}\n";
29 
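/* Recover the global ID of a cell from the global numbering, where non-owned (duplicate) cells are encoded as -(id+1) */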
30 PETSC_STATIC_INLINE PetscInt DMPlex_GlobalID(PetscInt point) { return point >= 0 ? point : -(point+1); }
31 
32 /*@C
33   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
34 
35   Input Parameters:
36 + dm      - The mesh DM
37 - height  - Height of the strata from which to construct the graph
38 
39   Output Parameters:
40 + numVertices     - Number of vertices in the graph
41 . offsets         - Point offsets in the graph
42 . adjacency       - Point connectivity in the graph
43 - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  A negative value indicates that the cell is a duplicate owned by another process.
44 
45   Notes: The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
46   representation on the mesh.
47 
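  Sample usage (a minimal sketch; the offsets and adjacency arrays are allocated by this routine and must be freed by the caller):
.vb
    PetscInt numVertices, *offsets = NULL, *adjacency = NULL;
    IS       globalNumbering;

    ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &offsets, &adjacency, &globalNumbering);CHKERRQ(ierr);
    /* ... hand the CSR graph (offsets, adjacency) to a partitioner ... */
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    ierr = PetscFree(adjacency);CHKERRQ(ierr);
    ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
.ve
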
48   Level: developer
49 
50 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMSetAdjacency()
51 @*/
52 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
53 {
54   PetscInt       dim, depth, p, pStart, pEnd, a, adjSize, idx, size;
55   PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
56   IS             cellNumbering;
57   const PetscInt *cellNum;
58   PetscBool      useCone, useClosure;
59   PetscSection   section;
60   PetscSegBuffer adjBuffer;
61   PetscSF        sfPoint;
62   PetscInt       *adjCells = NULL, *remoteCells = NULL;
63   const PetscInt *local;
64   PetscInt       nroots, nleaves, l;
65   PetscMPIInt    rank;
66   PetscErrorCode ierr;
67 
68   PetscFunctionBegin;
69   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
70   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
71   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
72   if (dim != depth) {
73     /* We do not handle the uninterpolated case here */
74     ierr = DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);CHKERRQ(ierr);
75     /* DMPlexCreateNeighborCSR does not make a numbering */
76     if (globalNumbering) {ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);CHKERRQ(ierr);}
77     /* Different behavior for empty graphs */
78     if (!*numVertices) {
79       ierr = PetscMalloc1(1, offsets);CHKERRQ(ierr);
80       (*offsets)[0] = 0;
81     }
82     /* Broken in parallel */
83     if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported");
84     PetscFunctionReturn(0);
85   }
86   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
87   ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
88   /* Build adjacency graph via a section/segbuffer */
89   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
90   ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
91   ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
92   /* Always use FVM adjacency to create partitioner graph */
93   ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
94   ierr = DMSetBasicAdjacency(dm, PETSC_TRUE, PETSC_FALSE);CHKERRQ(ierr);
95   ierr = DMPlexCreateNumbering_Internal(dm, pStart, pEnd, 0, NULL, sfPoint, &cellNumbering);CHKERRQ(ierr);
96   if (globalNumbering) {
97     ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
98     *globalNumbering = cellNumbering;
99   }
100   ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
101   /* For all boundary faces (including faces adjacent to a ghost cell), record the local cell in adjCells
102      Broadcast adjCells to remoteCells (to get cells from roots) and Reduce adjCells to remoteCells (to get cells from leaves)
103    */
104   ierr = PetscSFGetGraph(sfPoint, &nroots, &nleaves, &local, NULL);CHKERRQ(ierr);
105   if (nroots >= 0) {
106     PetscInt fStart, fEnd, f;
107 
108     ierr = PetscCalloc2(nroots, &adjCells, nroots, &remoteCells);CHKERRQ(ierr);
109     ierr = DMPlexGetHeightStratum(dm, height+1, &fStart, &fEnd);CHKERRQ(ierr);
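    /* Initialize adjCells with a sentinel; valid global cell IDs are nonnegative, so the MPI_MAX reduction below always prefers a real adjacent cell over a sentinel */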
110     for (l = 0; l < nroots; ++l) adjCells[l] = -3;
111     for (f = fStart; f < fEnd; ++f) {
112       const PetscInt *support;
113       PetscInt        supportSize;
114 
115       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
116       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
117       if (supportSize == 1) adjCells[f] = DMPlex_GlobalID(cellNum[support[0]]);
118       else if (supportSize == 2) {
119         ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr);
120         if (p >= 0) adjCells[f] = DMPlex_GlobalID(cellNum[support[1]]);
121         ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr);
122         if (p >= 0) adjCells[f] = DMPlex_GlobalID(cellNum[support[0]]);
123       }
124       /* Handle non-conforming meshes */
125       if (supportSize > 2) {
126         PetscInt        numChildren, i;
127         const PetscInt *children;
128 
129         ierr = DMPlexGetTreeChildren(dm, f, &numChildren, &children);CHKERRQ(ierr);
130         for (i = 0; i < numChildren; ++i) {
131           const PetscInt child = children[i];
132           if (fStart <= child && child < fEnd) {
133             ierr = DMPlexGetSupport(dm, child, &support);CHKERRQ(ierr);
134             ierr = DMPlexGetSupportSize(dm, child, &supportSize);CHKERRQ(ierr);
135             if (supportSize == 1) adjCells[child] = DMPlex_GlobalID(cellNum[support[0]]);
136             else if (supportSize == 2) {
137               ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr);
138               if (p >= 0) adjCells[child] = DMPlex_GlobalID(cellNum[support[1]]);
139               ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr);
140               if (p >= 0) adjCells[child] = DMPlex_GlobalID(cellNum[support[0]]);
141             }
142           }
143         }
144       }
145     }
146     for (l = 0; l < nroots; ++l) remoteCells[l] = -1;
147     ierr = PetscSFBcastBegin(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
148     ierr = PetscSFBcastEnd(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
149     ierr = PetscSFReduceBegin(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
150     ierr = PetscSFReduceEnd(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
151   }
152   /* Combine local and global adjacencies */
153   for (*numVertices = 0, p = pStart; p < pEnd; p++) {
154     /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
155     if (nroots > 0) {if (cellNum[p] < 0) continue;}
156     /* Add remote cells */
157     if (remoteCells) {
158       const PetscInt gp = DMPlex_GlobalID(cellNum[p]);
159       PetscInt       coneSize, numChildren, c, i;
160       const PetscInt *cone, *children;
161 
162       ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
163       ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
164       for (c = 0; c < coneSize; ++c) {
165         const PetscInt point = cone[c];
166         if (remoteCells[point] >= 0 && remoteCells[point] != gp) {
167           PetscInt *PETSC_RESTRICT pBuf;
168           ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
169           ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
170           *pBuf = remoteCells[point];
171         }
172         /* Handle non-conforming meshes */
173         ierr = DMPlexGetTreeChildren(dm, point, &numChildren, &children);CHKERRQ(ierr);
174         for (i = 0; i < numChildren; ++i) {
175           const PetscInt child = children[i];
176           if (remoteCells[child] >= 0 && remoteCells[child] != gp) {
177             PetscInt *PETSC_RESTRICT pBuf;
178             ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
179             ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
180             *pBuf = remoteCells[child];
181           }
182         }
183       }
184     }
185     /* Add local cells */
186     adjSize = PETSC_DETERMINE;
187     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
188     for (a = 0; a < adjSize; ++a) {
189       const PetscInt point = adj[a];
190       if (point != p && pStart <= point && point < pEnd) {
191         PetscInt *PETSC_RESTRICT pBuf;
192         ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
193         ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
194         *pBuf = DMPlex_GlobalID(cellNum[point]);
195       }
196     }
197     (*numVertices)++;
198   }
199   ierr = PetscFree(adj);CHKERRQ(ierr);
200   ierr = PetscFree2(adjCells, remoteCells);CHKERRQ(ierr);
201   ierr = DMSetBasicAdjacency(dm, useCone, useClosure);CHKERRQ(ierr);
202 
203   /* Derive CSR graph from section/segbuffer */
204   ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
205   ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
206   ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
207   for (idx = 0, p = pStart; p < pEnd; p++) {
208     if (nroots > 0) {if (cellNum[p] < 0) continue;}
209     ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
210   }
211   vOffsets[*numVertices] = size;
212   ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
213 
214   if (nroots >= 0) {
215     /* Filter out duplicate edges using section/segbuffer */
216     ierr = PetscSectionReset(section);CHKERRQ(ierr);
217     ierr = PetscSectionSetChart(section, 0, *numVertices);CHKERRQ(ierr);
218     for (p = 0; p < *numVertices; p++) {
219       PetscInt start = vOffsets[p], end = vOffsets[p+1];
220       PetscInt numEdges = end-start, *PETSC_RESTRICT edges;
221       ierr = PetscSortRemoveDupsInt(&numEdges, &graph[start]);CHKERRQ(ierr);
222       ierr = PetscSectionSetDof(section, p, numEdges);CHKERRQ(ierr);
223       ierr = PetscSegBufferGetInts(adjBuffer, numEdges, &edges);CHKERRQ(ierr);
224       ierr = PetscMemcpy(edges, &graph[start], numEdges*sizeof(*edges));CHKERRQ(ierr);
225     }
226     ierr = PetscFree(vOffsets);CHKERRQ(ierr);
227     ierr = PetscFree(graph);CHKERRQ(ierr);
228     /* Derive CSR graph from section/segbuffer */
229     ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
230     ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
231     ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
232     for (idx = 0, p = 0; p < *numVertices; p++) {
233       ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
234     }
235     vOffsets[*numVertices] = size;
236     ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
237   } else {
238     /* Sort adjacencies (not strictly necessary) */
239     for (p = 0; p < *numVertices; p++) {
240       PetscInt start = vOffsets[p], end = vOffsets[p+1];
241       ierr = PetscSortInt(end-start, &graph[start]);CHKERRQ(ierr);
242     }
243   }
244 
245   if (offsets) *offsets = vOffsets;
246   if (adjacency) *adjacency = graph;
247 
248   /* Cleanup */
249   ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
250   ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
251   ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
252   ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
253   PetscFunctionReturn(0);
254 }
255 
256 /*@C
257   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
258 
259   Collective
260 
261   Input Parameters:
262 + dm - The DMPlex
263 - cellHeight - The height of mesh points to treat as cells (default should be 0)
264 
265   Output Parameters:
266 + numVertices - The number of local vertices in the graph, or cells in the mesh.
267 . offsets     - The offset to the adjacency list for each cell
268 - adjacency   - The adjacency list for all cells
269 
270   Note: This is suitable for input to a mesh partitioner like ParMetis.
271 
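  Sample usage (a minimal sketch showing how to traverse the CSR output; the arrays must be freed by the caller):
.vb
    PetscInt numVertices, *offsets = NULL, *adjacency = NULL, c, a;

    ierr = DMPlexCreateNeighborCSR(dm, 0, &numVertices, &offsets, &adjacency);CHKERRQ(ierr);
    for (c = 0; c < numVertices; ++c) {
      for (a = offsets[c]; a < offsets[c+1]; ++a) {
        /* cell c is adjacent to cell adjacency[a] */
      }
    }
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    ierr = PetscFree(adjacency);CHKERRQ(ierr);
.ve
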
272   Level: advanced
273 
274 .seealso: DMPlexCreate()
275 @*/
276 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
277 {
278   const PetscInt maxFaceCases = 30;
279   PetscInt       numFaceCases = 0;
280   PetscInt       numFaceVertices[30]; /* = maxFaceCases; fixed size since C89 does not allow variable-length arrays */
281   PetscInt      *off, *adj;
282   PetscInt      *neighborCells = NULL;
283   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
284   PetscErrorCode ierr;
285 
286   PetscFunctionBegin;
287   /* For parallel partitioning, I think you have to communicate supports */
288   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
289   cellDim = dim - cellHeight;
290   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
291   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
292   if (cEnd - cStart == 0) {
293     if (numVertices) *numVertices = 0;
294     if (offsets)   *offsets   = NULL;
295     if (adjacency) *adjacency = NULL;
296     PetscFunctionReturn(0);
297   }
298   numCells  = cEnd - cStart;
299   faceDepth = depth - cellHeight;
300   if (dim == depth) {
301     PetscInt f, fStart, fEnd;
302 
303     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
304     /* Count neighboring cells */
305     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
306     for (f = fStart; f < fEnd; ++f) {
307       const PetscInt *support;
308       PetscInt        supportSize;
309       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
310       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
311       if (supportSize == 2) {
312         PetscInt numChildren;
313 
314         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
315         if (!numChildren) {
316           ++off[support[0]-cStart+1];
317           ++off[support[1]-cStart+1];
318         }
319       }
320     }
321     /* Prefix sum */
322     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
323     if (adjacency) {
324       PetscInt *tmp;
325 
326       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
327       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
328       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
329       /* Get neighboring cells */
330       for (f = fStart; f < fEnd; ++f) {
331         const PetscInt *support;
332         PetscInt        supportSize;
333         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
334         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
335         if (supportSize == 2) {
336           PetscInt numChildren;
337 
338           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
339           if (!numChildren) {
340             adj[tmp[support[0]-cStart]++] = support[1];
341             adj[tmp[support[1]-cStart]++] = support[0];
342           }
343         }
344       }
345 #if defined(PETSC_USE_DEBUG)
346       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %D != %D for cell %D", tmp[c], off[c+1], c+cStart);
347 #endif
348       ierr = PetscFree(tmp);CHKERRQ(ierr);
349     }
350     if (numVertices) *numVertices = numCells;
351     if (offsets)   *offsets   = off;
352     if (adjacency) *adjacency = adj;
353     PetscFunctionReturn(0);
354   }
355   /* Setup face recognition */
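  /* For each cone size occurring among the cells, record the number of vertices a face of such a cell has;
     the meet of two cells is later recognized as a shared face when its size matches one of these counts */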
356   if (faceDepth == 1) {
357     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
358 
359     for (c = cStart; c < cEnd; ++c) {
360       PetscInt corners;
361 
362       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
363       if (!cornersSeen[corners]) {
364         PetscInt nFV;
365 
366         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
367         cornersSeen[corners] = 1;
368 
369         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
370 
371         numFaceVertices[numFaceCases++] = nFV;
372       }
373     }
374   }
375   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
376   /* Count neighboring cells */
377   for (cell = cStart; cell < cEnd; ++cell) {
378     PetscInt numNeighbors = PETSC_DETERMINE, n;
379 
380     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
381     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
382     for (n = 0; n < numNeighbors; ++n) {
383       PetscInt        cellPair[2];
384       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
385       PetscInt        meetSize = 0;
386       const PetscInt *meet    = NULL;
387 
388       cellPair[0] = cell; cellPair[1] = neighborCells[n];
389       if (cellPair[0] == cellPair[1]) continue;
390       if (!found) {
391         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
392         if (meetSize) {
393           PetscInt f;
394 
395           for (f = 0; f < numFaceCases; ++f) {
396             if (numFaceVertices[f] == meetSize) {
397               found = PETSC_TRUE;
398               break;
399             }
400           }
401         }
402         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
403       }
404       if (found) ++off[cell-cStart+1];
405     }
406   }
407   /* Prefix sum */
408   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
409 
410   if (adjacency) {
411     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
412     /* Get neighboring cells */
413     for (cell = cStart; cell < cEnd; ++cell) {
414       PetscInt numNeighbors = PETSC_DETERMINE, n;
415       PetscInt cellOffset   = 0;
416 
417       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
418       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
419       for (n = 0; n < numNeighbors; ++n) {
420         PetscInt        cellPair[2];
421         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
422         PetscInt        meetSize = 0;
423         const PetscInt *meet    = NULL;
424 
425         cellPair[0] = cell; cellPair[1] = neighborCells[n];
426         if (cellPair[0] == cellPair[1]) continue;
427         if (!found) {
428           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
429           if (meetSize) {
430             PetscInt f;
431 
432             for (f = 0; f < numFaceCases; ++f) {
433               if (numFaceVertices[f] == meetSize) {
434                 found = PETSC_TRUE;
435                 break;
436               }
437             }
438           }
439           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
440         }
441         if (found) {
442           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
443           ++cellOffset;
444         }
445       }
446     }
447   }
448   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
449   if (numVertices) *numVertices = numCells;
450   if (offsets)   *offsets   = off;
451   if (adjacency) *adjacency = adj;
452   PetscFunctionReturn(0);
453 }
454 
455 /*@C
456   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
457 
458   Not Collective
459 
460   Input Parameters:
461 + name        - The name of a new user-defined creation routine
462 - create_func - The creation routine itself
463 
464   Notes:
465   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
466 
467   Sample usage:
468 .vb
469     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
470 .ve
471 
472   Then, your PetscPartitioner type can be chosen with the procedural interface via
473 .vb
474     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
475     PetscPartitionerSetType(PetscPartitioner, "my_part");
476 .ve
477    or at runtime via the option
478 .vb
479     -petscpartitioner_type my_part
480 .ve
481 
482   Level: advanced
483 
484 .keywords: PetscPartitioner, register
485 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
486 
487 @*/
488 PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
489 {
490   PetscErrorCode ierr;
491 
492   PetscFunctionBegin;
493   ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
494   PetscFunctionReturn(0);
495 }
496 
497 /*@C
498   PetscPartitionerSetType - Builds a particular PetscPartitioner
499 
500   Collective on PetscPartitioner
501 
502   Input Parameters:
503 + part - The PetscPartitioner object
504 - name - The kind of partitioner
505 
506   Options Database Key:
507 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
508 
509   Level: intermediate
510 
511 .keywords: PetscPartitioner, set, type
512 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
513 @*/
514 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
515 {
516   PetscErrorCode (*r)(PetscPartitioner);
517   PetscBool      match;
518   PetscErrorCode ierr;
519 
520   PetscFunctionBegin;
521   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
522   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
523   if (match) PetscFunctionReturn(0);
524 
525   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
526   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
527   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
528 
529   if (part->ops->destroy) {
530     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
531   }
532   ierr = PetscMemzero(part->ops, sizeof(struct _PetscPartitionerOps));CHKERRQ(ierr);
533   ierr = (*r)(part);CHKERRQ(ierr);
534   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
535   PetscFunctionReturn(0);
536 }
537 
538 /*@C
539   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
540 
541   Not Collective
542 
543   Input Parameter:
544 . part - The PetscPartitioner
545 
546   Output Parameter:
547 . name - The PetscPartitioner type name
548 
549   Level: intermediate
550 
551 .keywords: PetscPartitioner, get, type, name
552 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
553 @*/
554 PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
555 {
556   PetscErrorCode ierr;
557 
558   PetscFunctionBegin;
559   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
560   PetscValidPointer(name, 2);
561   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
562   *name = ((PetscObject) part)->type_name;
563   PetscFunctionReturn(0);
564 }
565 
566 /*@C
567   PetscPartitionerView - Views a PetscPartitioner
568 
569   Collective on PetscPartitioner
570 
571   Input Parameters:
572 + part - the PetscPartitioner object to view
573 - v    - the viewer
574 
575   Level: developer
576 
577 .seealso: PetscPartitionerDestroy()
578 @*/
579 PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
580 {
581   PetscMPIInt    size;
582   PetscBool      isascii;
583   PetscErrorCode ierr;
584 
585   PetscFunctionBegin;
586   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
587   if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
588   ierr = PetscObjectTypeCompare((PetscObject) v, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
589   if (isascii) {
590     ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
591     ierr = PetscViewerASCIIPrintf(v, "Graph Partitioner: %d MPI Process%s\n", size, size > 1 ? "es" : "");CHKERRQ(ierr);
592     ierr = PetscViewerASCIIPrintf(v, "  type: %s\n", part->hdr.type_name);CHKERRQ(ierr);
593     ierr = PetscViewerASCIIPushTab(v);CHKERRQ(ierr);
594     ierr = PetscViewerASCIIPrintf(v, "edge cut: %D\n", part->edgeCut);CHKERRQ(ierr);
595     ierr = PetscViewerASCIIPrintf(v, "balance:  %.2g\n", part->balance);CHKERRQ(ierr);
596     ierr = PetscViewerASCIIPopTab(v);CHKERRQ(ierr);
597   }
598   if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
599   PetscFunctionReturn(0);
600 }
601 
602 static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType)
603 {
604   PetscFunctionBegin;
605   if (!currentType) {
606 #if defined(PETSC_HAVE_CHACO)
607     *defaultType = PETSCPARTITIONERCHACO;
608 #elif defined(PETSC_HAVE_PARMETIS)
609     *defaultType = PETSCPARTITIONERPARMETIS;
610 #elif defined(PETSC_HAVE_PTSCOTCH)
611     *defaultType = PETSCPARTITIONERPTSCOTCH;
612 #else
613     *defaultType = PETSCPARTITIONERSIMPLE;
614 #endif
615   } else {
616     *defaultType = currentType;
617   }
618   PetscFunctionReturn(0);
619 }
620 
621 /*@
622   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
623 
624   Collective on PetscPartitioner
625 
626   Input Parameter:
627 . part - the PetscPartitioner object to set options for
628 
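  Sample usage (a minimal sketch; the type can then be chosen at runtime, e.g. with -petscpartitioner_type simple):
.vb
    ierr = PetscPartitionerCreate(comm, &part);CHKERRQ(ierr);
    ierr = PetscPartitionerSetFromOptions(part);CHKERRQ(ierr);
.ve
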
629   Level: developer
630 
631 .seealso: PetscPartitionerView()
632 @*/
633 PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
634 {
635   const char    *defaultType;
636   char           name[256];
637   PetscBool      flg;
638   PetscErrorCode ierr;
639 
640   PetscFunctionBegin;
641   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
642   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
643   ierr = PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);CHKERRQ(ierr);
644   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
645   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, sizeof(name), &flg);CHKERRQ(ierr);
646   if (flg) {
647     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
648   } else if (!((PetscObject) part)->type_name) {
649     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
650   }
651   if (part->ops->setfromoptions) {
652     ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);
653   }
654   ierr = PetscViewerDestroy(&part->viewerGraph);CHKERRQ(ierr);
655   ierr = PetscOptionsGetViewer(((PetscObject) part)->comm, ((PetscObject) part)->options, ((PetscObject) part)->prefix, "-petscpartitioner_view_graph", &part->viewerGraph, &part->formatGraph, &part->viewGraph);CHKERRQ(ierr);
656   /* process any options handlers added with PetscObjectAddOptionsHandler() */
657   ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
658   ierr = PetscOptionsEnd();CHKERRQ(ierr);
659   PetscFunctionReturn(0);
660 }
661 
662 /*@C
663   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
664 
665   Collective on PetscPartitioner
666 
667   Input Parameter:
668 . part - the PetscPartitioner object to set up
669 
670   Level: developer
671 
672 .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
673 @*/
674 PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
675 {
676   PetscErrorCode ierr;
677 
678   PetscFunctionBegin;
679   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
680   if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
681   PetscFunctionReturn(0);
682 }
683 
684 /*@
685   PetscPartitionerDestroy - Destroys a PetscPartitioner object
686 
687   Collective on PetscPartitioner
688 
689   Input Parameter:
690 . part - the PetscPartitioner object to destroy
691 
692   Level: developer
693 
694 .seealso: PetscPartitionerView()
695 @*/
696 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
697 {
698   PetscErrorCode ierr;
699 
700   PetscFunctionBegin;
701   if (!*part) PetscFunctionReturn(0);
702   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
703 
704   if (--((PetscObject)(*part))->refct > 0) {*part = NULL; PetscFunctionReturn(0);}
705   ((PetscObject) (*part))->refct = 0;
706 
707   ierr = PetscViewerDestroy(&(*part)->viewerGraph);CHKERRQ(ierr);
708   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
709   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
710   PetscFunctionReturn(0);
711 }
712 
713 /*@
714   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
715 
716   Collective on MPI_Comm
717 
718   Input Parameter:
719 . comm - The communicator for the PetscPartitioner object
720 
721   Output Parameter:
722 . part - The PetscPartitioner object
723 
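  Sample usage (a minimal sketch):
.vb
    PetscPartitioner part;

    ierr = PetscPartitionerCreate(PETSC_COMM_WORLD, &part);CHKERRQ(ierr);
    ierr = PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE);CHKERRQ(ierr);
    /* ... partition a mesh ... */
    ierr = PetscPartitionerDestroy(&part);CHKERRQ(ierr);
.ve
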
724   Level: beginner
725 
726 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
727 @*/
728 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
729 {
730   PetscPartitioner p;
731   const char       *partitionerType = NULL;
732   PetscErrorCode   ierr;
733 
734   PetscFunctionBegin;
735   PetscValidPointer(part, 2);
736   *part = NULL;
737   ierr = DMInitializePackage();CHKERRQ(ierr);
738 
739   ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
740   ierr = PetscPartitionerGetDefaultType(NULL,&partitionerType);CHKERRQ(ierr);
741   ierr = PetscPartitionerSetType(p,partitionerType);CHKERRQ(ierr);
742 
743   p->edgeCut = 0;
744   p->balance = 0.0;
745 
746   *part = p;
747   PetscFunctionReturn(0);
748 }
749 
750 /*@
751   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
752 
753   Collective on DM
754 
755   Input Parameters:
756 + part    - The PetscPartitioner
757 - dm      - The mesh DM
758 
759   Output Parameters:
760 + partSection     - The PetscSection giving the division of points by partition
761 - partition       - The list of points by partition
762 
763   Options Database Key:
764 . -petscpartitioner_view_graph - View the graph we are partitioning
765 
766   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
767 
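  Sample usage (a minimal sketch; partSection is created by the caller and filled in by this routine):
.vb
    PetscSection partSection;
    IS           partition;

    ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &partSection);CHKERRQ(ierr);
    ierr = PetscPartitionerPartition(part, dm, partSection, &partition);CHKERRQ(ierr);
    /* ... migrate points according to the partition ... */
    ierr = ISDestroy(&partition);CHKERRQ(ierr);
    ierr = PetscSectionDestroy(&partSection);CHKERRQ(ierr);
.ve
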
768   Level: developer
769 
770 .seealso: DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
771 @*/
772 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
773 {
774   PetscMPIInt    size;
775   PetscErrorCode ierr;
776 
777   PetscFunctionBegin;
778   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
779   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
780   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 3);
781   PetscValidPointer(partition, 4);
782   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
783   if (size == 1) {
784     PetscInt *points;
785     PetscInt  cStart, cEnd, c;
786 
787     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
788     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
789     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
790     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
791     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
792     for (c = cStart; c < cEnd; ++c) points[c - cStart] = c;
793     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
794     PetscFunctionReturn(0);
795   }
796   if (part->height == 0) {
797     PetscInt numVertices;
798     PetscInt *start     = NULL;
799     PetscInt *adjacency = NULL;
800     IS       globalNumbering;
801 
802     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
803     if (part->viewGraph) {
804       PetscViewer viewer = part->viewerGraph;
805       PetscBool   isascii;
806       PetscInt    v, i;
807       PetscMPIInt rank;
808 
809       ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) viewer), &rank);CHKERRQ(ierr);
810       ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
811       if (isascii) {
812         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
813         ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]Nv: %D\n", rank, numVertices);CHKERRQ(ierr);
814         for (v = 0; v < numVertices; ++v) {
815           const PetscInt s = start[v];
816           const PetscInt e = start[v+1];
817 
818           ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]  ", rank);CHKERRQ(ierr);
819           for (i = s; i < e; ++i) {ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%D ", adjacency[i]);CHKERRQ(ierr);}
820           ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%D-%D)\n", s, e);CHKERRQ(ierr);
821         }
822         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
823         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
824       }
825     }
826     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
827     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
828     ierr = PetscFree(start);CHKERRQ(ierr);
829     ierr = PetscFree(adjacency);CHKERRQ(ierr);
830     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
831       const PetscInt *globalNum;
832       const PetscInt *partIdx;
833       PetscInt *map, cStart, cEnd;
834       PetscInt *adjusted, i, localSize, offset;
835       IS    newPartition;
836 
837       ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
838       ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
839       ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
840       ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
841       ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
842       ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
843       for (i = cStart, offset = 0; i < cEnd; i++) {
844         if (globalNum[i - cStart] >= 0) map[offset++] = i;
845       }
846       for (i = 0; i < localSize; i++) {
847         adjusted[i] = map[partIdx[i]];
848       }
849       ierr = PetscFree(map);CHKERRQ(ierr);
850       ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
851       ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
852       ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
853       ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
854       ierr = ISDestroy(partition);CHKERRQ(ierr);
855       *partition = newPartition;
856     }
857   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
858   ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
859   PetscFunctionReturn(0);
860 }
861 
862 static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
863 {
864   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
865   PetscErrorCode          ierr;
866 
867   PetscFunctionBegin;
868   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
869   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
870   ierr = PetscFree(p);CHKERRQ(ierr);
871   PetscFunctionReturn(0);
872 }
873 
874 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
875 {
876   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
877   PetscErrorCode          ierr;
878 
879   PetscFunctionBegin;
880   if (p->random) {
881     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
882     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
883     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
884   }
885   PetscFunctionReturn(0);
886 }
887 
888 static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
889 {
890   PetscBool      iascii;
891   PetscErrorCode ierr;
892 
893   PetscFunctionBegin;
894   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
895   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
896   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
897   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
898   PetscFunctionReturn(0);
899 }
900 
901 static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
902 {
903   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
904   PetscErrorCode          ierr;
905 
906   PetscFunctionBegin;
907   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner Shell Options");CHKERRQ(ierr);
908   ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
909   ierr = PetscOptionsTail();CHKERRQ(ierr);
910   PetscFunctionReturn(0);
911 }
912 
913 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
914 {
915   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
916   PetscInt                np;
917   PetscErrorCode          ierr;
918 
919   PetscFunctionBegin;
920   if (p->random) {
921     PetscRandom r;
922     PetscInt   *sizes, *points, v, pt;
923     PetscMPIInt rank;
924 
925     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
926     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
927     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr);
928     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
929     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
930     for (v = 0; v < numVertices; ++v) {points[v] = v;}
931     for (pt = 0; pt < nparts; ++pt) {sizes[pt] = numVertices/nparts + (PetscInt) (pt < numVertices % nparts);}
932     for (v = numVertices-1; v > 0; --v) {
933       PetscReal val;
934       PetscInt  w, tmp;
935 
936       ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));CHKERRQ(ierr);
937       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
938       w    = PetscFloorReal(val);
939       tmp       = points[v];
940       points[v] = points[w];
941       points[w] = tmp;
942     }
943     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
944     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
945     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
946   }
947   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
948   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
949   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %D != configured partitions %D", nparts, np);
950   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
951   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %D != configured vertices %D", numVertices, np);
952   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
953   *partition = p->partition;
954   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
955   PetscFunctionReturn(0);
956 }
957 
958 static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
959 {
960   PetscFunctionBegin;
961   part->ops->view           = PetscPartitionerView_Shell;
962   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
963   part->ops->destroy        = PetscPartitionerDestroy_Shell;
964   part->ops->partition      = PetscPartitionerPartition_Shell;
965   PetscFunctionReturn(0);
966 }
967 
968 /*MC
969   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
970 
971   Level: intermediate
972 
973 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
974 M*/
975 
976 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
977 {
978   PetscPartitioner_Shell *p;
979   PetscErrorCode          ierr;
980 
981   PetscFunctionBegin;
982   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
983   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
984   part->data = p;
985 
986   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
987   p->random = PETSC_FALSE;
988   PetscFunctionReturn(0);
989 }
990 
991 /*@C
992   PetscPartitionerShellSetPartition - Set an artifical partition for a mesh
993 
994   Collective on Part
995 
996   Input Parameters:
997 + part   - The PetscPartitioner
998 . size   - The number of partitions
999 . sizes  - array of length size (or NULL) providing the number of points in each partition
1000 - points - array of length sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)
1001 
1002   Level: developer
1003 
1004   Notes:
1006   It is safe to free the sizes and points arrays after use in this routine.
1007 
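  Sample usage (a sketch assigning eight points to two partitions, assuming part is a PETSCPARTITIONERSHELL):
.vb
    PetscInt sizes[2]  = {4, 4};
    PetscInt points[8] = {0, 1, 2, 3, 4, 5, 6, 7};

    ierr = PetscPartitionerShellSetPartition(part, 2, sizes, points);CHKERRQ(ierr);
.ve
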
1008 .seealso: DMPlexDistribute(), PetscPartitionerCreate()
1009 @*/
1010 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
1011 {
1012   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1013   PetscInt                proc, numPoints;
1014   PetscErrorCode          ierr;
1015 
1016   PetscFunctionBegin;
1017   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1018   if (sizes)  {PetscValidPointer(sizes, 3);}
1019   if (points) {PetscValidPointer(points, 4);}
1020   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
1021   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
1022   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
1023   ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
1024   if (sizes) {
1025     for (proc = 0; proc < size; ++proc) {
1026       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
1027     }
1028   }
1029   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
1030   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
1031   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
1032   PetscFunctionReturn(0);
1033 }
1034 
1035 /*@
1036   PetscPartitionerShellSetRandom - Set the flag to use a random partition
1037 
1038   Collective on Part
1039 
1040   Input Parameters:
1041 + part   - The PetscPartitioner
1042 - random - The flag to use a random partition
1043 
1044   Level: intermediate
1045 
1046 .seealso: PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
1047 @*/
1048 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
1049 {
1050   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1051 
1052   PetscFunctionBegin;
1053   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1054   p->random = random;
1055   PetscFunctionReturn(0);
1056 }
1057 
1058 /*@
1059   PetscPartitionerShellGetRandom - Get the flag to use a random partition
1060 
1061   Collective on Part
1062 
1063   Input Parameter:
1064 . part   - The PetscPartitioner
1065 
1066   Output Parameter:
1067 . random - The flag to use a random partition
1068 
1069   Level: intermediate
1070 
1071 .seealso: PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
1072 @*/
1073 PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
1074 {
1075   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1076 
1077   PetscFunctionBegin;
1078   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1079   PetscValidPointer(random, 2);
1080   *random = p->random;
1081   PetscFunctionReturn(0);
1082 }
1083 
1084 static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
1085 {
1086   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
1087   PetscErrorCode          ierr;
1088 
1089   PetscFunctionBegin;
1090   ierr = PetscFree(p);CHKERRQ(ierr);
1091   PetscFunctionReturn(0);
1092 }
1093 
1094 static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
1095 {
1096   PetscFunctionBegin;
1097   PetscFunctionReturn(0);
1098 }
1099 
1100 static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
1101 {
1102   PetscBool      iascii;
1103   PetscErrorCode ierr;
1104 
1105   PetscFunctionBegin;
1106   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1107   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1108   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1109   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
1110   PetscFunctionReturn(0);
1111 }
1112 
1113 static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1114 {
1115   MPI_Comm       comm;
1116   PetscInt       np;
1117   PetscMPIInt    size;
1118   PetscErrorCode ierr;
1119 
1120   PetscFunctionBegin;
1121   comm = PetscObjectComm((PetscObject)part);
1122   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1123   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1124   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1125   if (size == 1) {
1126     for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
1127   } else {
1128     PetscMPIInt rank;
1129     PetscInt nvGlobal, *offsets, myFirst, myLast;
1130 
1131     ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
1132     offsets[0] = 0;
1133     ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
1134     for (np = 2; np <= size; np++) {
1135       offsets[np] += offsets[np-1];
1136     }
1137     nvGlobal = offsets[size];
1138     ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1139     myFirst = offsets[rank];
1140     myLast  = offsets[rank + 1] - 1;
1141     ierr = PetscFree(offsets);CHKERRQ(ierr);
1142     if (numVertices) {
1143       PetscInt firstPart = 0, firstLargePart = 0;
1144       PetscInt lastPart = 0, lastLargePart = 0;
1145       PetscInt rem = nvGlobal % nparts;
1146       PetscInt pSmall = nvGlobal/nparts;
1147       PetscInt pBig = nvGlobal/nparts + 1;
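      /* The nvGlobal vertices are split into nparts contiguous chunks: the first rem chunks contain pBig vertices each, the remaining chunks pSmall */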
1148 
1150       if (rem) {
1151         firstLargePart = myFirst / pBig;
1152         lastLargePart  = myLast  / pBig;
1153 
1154         if (firstLargePart < rem) {
1155           firstPart = firstLargePart;
1156         } else {
1157           firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
1158         }
1159         if (lastLargePart < rem) {
1160           lastPart = lastLargePart;
1161         } else {
1162           lastPart = rem + (myLast - (rem * pBig)) / pSmall;
1163         }
1164       } else {
1165         firstPart = myFirst / (nvGlobal/nparts);
1166         lastPart  = myLast  / (nvGlobal/nparts);
1167       }
1168 
1169       for (np = firstPart; np <= lastPart; np++) {
1170         PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
1171         PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);
1172 
1173         PartStart = PetscMax(PartStart,myFirst);
1174         PartEnd   = PetscMin(PartEnd,myLast+1);
1175         ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
1176       }
1177     }
1178   }
1179   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1180   PetscFunctionReturn(0);
1181 }
1182 
1183 static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1184 {
1185   PetscFunctionBegin;
1186   part->ops->view      = PetscPartitionerView_Simple;
1187   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1188   part->ops->partition = PetscPartitionerPartition_Simple;
1189   PetscFunctionReturn(0);
1190 }
1191 
1192 /*MC
1193   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1194 
1195   Level: intermediate
1196 
1197 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1198 M*/
1199 
1200 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1201 {
1202   PetscPartitioner_Simple *p;
1203   PetscErrorCode           ierr;
1204 
1205   PetscFunctionBegin;
1206   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1207   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1208   part->data = p;
1209 
1210   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1211   PetscFunctionReturn(0);
1212 }
1213 
1214 static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1215 {
1216   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1217   PetscErrorCode          ierr;
1218 
1219   PetscFunctionBegin;
1220   ierr = PetscFree(p);CHKERRQ(ierr);
1221   PetscFunctionReturn(0);
1222 }
1223 
1224 static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
1225 {
1226   PetscFunctionBegin;
1227   PetscFunctionReturn(0);
1228 }
1229 
1230 static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1231 {
1232   PetscBool      iascii;
1233   PetscErrorCode ierr;
1234 
1235   PetscFunctionBegin;
1236   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1237   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1238   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1239   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1240   PetscFunctionReturn(0);
1241 }
1242 
1243 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1244 {
1245   PetscInt       np;
1246   PetscErrorCode ierr;
1247 
1248   PetscFunctionBegin;
1249   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1250   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1251   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1252   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1253   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1254   PetscFunctionReturn(0);
1255 }
1256 
1257 static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1258 {
1259   PetscFunctionBegin;
1260   part->ops->view      = PetscPartitionerView_Gather;
1261   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1262   part->ops->partition = PetscPartitionerPartition_Gather;
1263   PetscFunctionReturn(0);
1264 }
1265 
1266 /*MC
1267   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1268 
1269   Level: intermediate
1270 
1271 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1272 M*/
1273 
1274 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1275 {
1276   PetscPartitioner_Gather *p;
1277   PetscErrorCode           ierr;
1278 
1279   PetscFunctionBegin;
1280   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1281   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1282   part->data = p;
1283 
1284   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1285   PetscFunctionReturn(0);
1286 }
1287 
1288 
1289 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1290 {
1291   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1292   PetscErrorCode          ierr;
1293 
1294   PetscFunctionBegin;
1295   ierr = PetscFree(p);CHKERRQ(ierr);
1296   PetscFunctionReturn(0);
1297 }
1298 
1299 static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1300 {
1301   PetscFunctionBegin;
1302   PetscFunctionReturn(0);
1303 }
1304 
1305 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1306 {
1307   PetscBool      iascii;
1308   PetscErrorCode ierr;
1309 
1310   PetscFunctionBegin;
1311   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1312   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1313   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1314   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1315   PetscFunctionReturn(0);
1316 }
1317 
1318 #if defined(PETSC_HAVE_CHACO)
1319 #if defined(PETSC_HAVE_UNISTD_H)
1320 #include <unistd.h>
1321 #endif
1322 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1323 #include <chaco.h>
1324 #else
1325 /* Older versions of Chaco do not have an include file */
1326 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1327                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1328                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1329                        int mesh_dims[3], double *goal, int global_method, int local_method,
1330                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1331 #endif
1332 extern int FREE_GRAPH;
1333 #endif
1334 
1335 static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1336 {
1337 #if defined(PETSC_HAVE_CHACO)
1338   enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
1339   MPI_Comm       comm;
1340   int            nvtxs          = numVertices; /* number of vertices in full graph */
1341   int           *vwgts          = NULL;   /* weights for all vertices */
1342   float         *ewgts          = NULL;   /* weights for all edges */
1343   float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
1344   char          *outassignname  = NULL;   /*  name of assignment output file */
1345   char          *outfilename    = NULL;   /* output file name */
1346   int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
1347   int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
1348   int            mesh_dims[3];            /* dimensions of mesh of processors */
1349   double        *goal          = NULL;    /* desired set sizes for each set */
1350   int            global_method = 1;       /* global partitioning algorithm */
1351   int            local_method  = 1;       /* local partitioning algorithm */
1352   int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
1353   int            vmax          = 200;     /* how many vertices to coarsen down to? */
1354   int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
1355   double         eigtol        = 0.001;   /* tolerance on eigenvectors */
1356   long           seed          = 123636512; /* for random graph mutations */
1357 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1358   int           *assignment;              /* Output partition */
1359 #else
1360   short int     *assignment;              /* Output partition */
1361 #endif
1362   int            fd_stdout, fd_pipe[2];
1363   PetscInt      *points;
1364   int            i, v, p;
1365   PetscErrorCode ierr;
1366 
1367   PetscFunctionBegin;
1368   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1369 #if defined (PETSC_USE_DEBUG)
1370   {
1371     int ival,isum;
1372     PetscBool distributed;
1373 
1374     ival = (numVertices > 0);
1375     ierr = MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);CHKERRQ(ierr);
1376     distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
1377     if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
1378   }
1379 #endif
1380   if (!numVertices) {
1381     ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1382     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1383     ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1384     PetscFunctionReturn(0);
1385   }
1386   FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
1387   for (i = 0; i < start[numVertices]; ++i) ++adjacency[i]; /* Chaco expects 1-based vertex numbers */
1388 
1389   if (global_method == INERTIAL_METHOD) {
1390     /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
1391     SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
1392   }
1393   mesh_dims[0] = nparts;
1394   mesh_dims[1] = 1;
1395   mesh_dims[2] = 1;
1396   ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
1397   /* Chaco outputs to stdout. We redirect this to a buffer. */
1398   /* TODO: check error codes for UNIX calls */
1399 #if defined(PETSC_HAVE_UNISTD_H)
1400   {
1401     int piperet;
1402     piperet = pipe(fd_pipe);
1403     if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
1404     fd_stdout = dup(1);
1405     close(1);
1406     dup2(fd_pipe[1], 1);
1407   }
1408 #endif
1409   ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
1410                    assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
1411                    vmax, ndims, eigtol, seed);
1412 #if defined(PETSC_HAVE_UNISTD_H)
1413   {
1414     char msgLog[10000];
1415     int  count;
1416 
1417     fflush(stdout);
1418     count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
1419     if (count < 0) count = 0;
1420     msgLog[count] = 0;
1421     close(1);
1422     dup2(fd_stdout, 1);
1423     close(fd_stdout);
1424     close(fd_pipe[0]);
1425     close(fd_pipe[1]);
1426     if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
1427   }
1428 #else
1429   if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
1430 #endif
1431   /* Convert to PetscSection+IS */
1432   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1433   for (v = 0; v < nvtxs; ++v) {
1434     ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
1435   }
1436   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1437   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1438   for (p = 0, i = 0; p < nparts; ++p) {
1439     for (v = 0; v < nvtxs; ++v) {
1440       if (assignment[v] == p) points[i++] = v;
1441     }
1442   }
1443   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1444   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1445   if (global_method == INERTIAL_METHOD) {
1446     /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
1447   }
1448   ierr = PetscFree(assignment);CHKERRQ(ierr);
1449   for (i = 0; i < start[numVertices]; ++i) --adjacency[i]; /* restore the 0-based numbering */
1450   PetscFunctionReturn(0);
1451 #else
1452   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
1453 #endif
1454 }
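
/* The stdout-capture trick used in PetscPartitionerPartition_Chaco() above, as
   a minimal self-contained sketch (editor's addition; error checking of the
   UNIX calls is elided, as in the code above):

     #include <unistd.h>
     #include <stdio.h>

     char buf[1024];
     int  fd_out, fd_pipe[2], count;

     pipe(fd_pipe);              // create the capture pipe
     fd_out = dup(1);            // remember the real stdout
     close(1);
     dup2(fd_pipe[1], 1);        // stdout now feeds the pipe
     printf("chatty library\n"); // stands in for the Chaco call
     fflush(stdout);
     count = read(fd_pipe[0], buf, sizeof(buf)-1);
     if (count < 0) count = 0;
     buf[count] = 0;             // NUL-terminate the captured text
     close(1);
     dup2(fd_out, 1);            // restore the original stdout
     close(fd_out); close(fd_pipe[0]); close(fd_pipe[1]);
*/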
1455 
1456 static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1457 {
1458   PetscFunctionBegin;
1459   part->ops->view      = PetscPartitionerView_Chaco;
1460   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1461   part->ops->partition = PetscPartitionerPartition_Chaco;
1462   PetscFunctionReturn(0);
1463 }
1464 
1465 /*MC
1466   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1467 
1468   Level: intermediate
1469 
1470 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1471 M*/
1472 
1473 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1474 {
1475   PetscPartitioner_Chaco *p;
1476   PetscErrorCode          ierr;
1477 
1478   PetscFunctionBegin;
1479   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1480   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1481   part->data = p;
1482 
1483   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1484   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1485   PetscFunctionReturn(0);
1486 }
1487 
1488 static const char *ptypes[] = {"kway", "rb"};
1489 
1490 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1491 {
1492   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1493   PetscErrorCode             ierr;
1494 
1495   PetscFunctionBegin;
1496   ierr = PetscFree(p);CHKERRQ(ierr);
1497   PetscFunctionReturn(0);
1498 }
1499 
1500 static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1501 {
1502   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1503   PetscErrorCode             ierr;
1504 
1505   PetscFunctionBegin;
1506   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1507   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis type: %s\n", ptypes[p->ptype]);CHKERRQ(ierr);
1508   ierr = PetscViewerASCIIPrintf(viewer, "load imbalance ratio %g\n", (double) p->imbalanceRatio);CHKERRQ(ierr);
1509   ierr = PetscViewerASCIIPrintf(viewer, "debug flag %D\n", p->debugFlag);CHKERRQ(ierr);
1510   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1511   PetscFunctionReturn(0);
1512 }
1513 
1514 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1515 {
1516   PetscBool      iascii;
1517   PetscErrorCode ierr;
1518 
1519   PetscFunctionBegin;
1520   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1521   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1522   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1523   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1524   PetscFunctionReturn(0);
1525 }
1526 
1527 static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1528 {
1529   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1530   PetscInt                  p_randomSeed = -1; /* TODO: Add this to PetscPartitioner_ParMetis */
1531   PetscErrorCode            ierr;
1532 
1533   PetscFunctionBegin;
1534   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");CHKERRQ(ierr);
1535   ierr = PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);CHKERRQ(ierr);
1536   ierr = PetscOptionsReal("-petscpartitioner_parmetis_imbalance_ratio", "Load imbalance ratio limit", "", p->imbalanceRatio, &p->imbalanceRatio, NULL);CHKERRQ(ierr);
1537   ierr = PetscOptionsInt("-petscpartitioner_parmetis_debug", "Debugging flag", "", p->debugFlag, &p->debugFlag, NULL);CHKERRQ(ierr);
1538   ierr = PetscOptionsInt("-petscpartitioner_parmetis_seed", "Random seed", "", p_randomSeed, &p_randomSeed, NULL);CHKERRQ(ierr);
1539   ierr = PetscOptionsTail();CHKERRQ(ierr);
1540   PetscFunctionReturn(0);
1541 }
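
/* For example (editor's note; "./app" is a placeholder executable name), the
   options registered above combine on the command line as

     ./app -petscpartitioner_type parmetis
           -petscpartitioner_parmetis_type rb
           -petscpartitioner_parmetis_imbalance_ratio 1.1
           -petscpartitioner_parmetis_seed 42

   where "rb" selects recursive bisection instead of the default k-way. */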
1542 
1543 #if defined(PETSC_HAVE_PARMETIS)
1544 #include <parmetis.h>
1545 #endif
1546 
1547 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1548 {
1549 #if defined(PETSC_HAVE_PARMETIS)
1550   PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *) part->data;
1551   MPI_Comm       comm;
1552   PetscSection   section;
1553   PetscInt       nvtxs       = numVertices; /* The number of vertices in full graph */
1554   PetscInt      *vtxdist;                   /* Distribution of vertices across processes */
1555   PetscInt      *xadj        = start;       /* Start of edge list for each vertex */
1556   PetscInt      *adjncy      = adjacency;   /* Edge lists for all vertices */
1557   PetscInt      *vwgt        = NULL;        /* Vertex weights */
1558   PetscInt      *adjwgt      = NULL;        /* Edge weights */
1559   PetscInt       wgtflag     = 0;           /* Indicates which weights are present */
1560   PetscInt       numflag     = 0;           /* Indicates initial offset (0 or 1) */
1561   PetscInt       ncon        = 1;           /* The number of weights per vertex */
1562   PetscInt       metis_ptype = pm->ptype;   /* kway or recursive bisection */
1563   real_t        *tpwgts;                    /* The fraction of vertex weights assigned to each partition */
1564   real_t        *ubvec;                     /* The balance intolerance for vertex weights */
1565   PetscInt       options[64];               /* Options */
1566   /* Outputs */
1567   PetscInt       v, i, *assignment, *points;
1568   PetscMPIInt    size, rank, p;
1569   PetscInt       pm_randomSeed = -1;
1570   PetscErrorCode ierr;
1571 
1572   PetscFunctionBegin;
1573   ierr = PetscOptionsGetInt(((PetscObject)part)->options,((PetscObject)part)->prefix,"-petscpartitioner_parmetis_seed", &pm_randomSeed, NULL);CHKERRQ(ierr);
1574   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1575   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1576   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1577   /* Calculate vertex distribution */
1578   ierr = PetscMalloc5(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
1579   vtxdist[0] = 0;
1580   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1581   for (p = 2; p <= size; ++p) {
1582     vtxdist[p] += vtxdist[p-1];
1583   }
1584   /* Calculate partition weights */
1585   for (p = 0; p < nparts; ++p) {
1586     tpwgts[p] = 1.0/nparts;
1587   }
1588   ubvec[0] = pm->imbalanceRatio;
1589   /* Weight cells by dofs on cell by default */
1590   ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
1591   if (section) {
1592     PetscInt cStart, cEnd, dof;
1593 
1594     ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
1595     for (v = cStart; v < cEnd; ++v) {
1596       ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1597       /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1598       /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1599       if (v-cStart < numVertices) vwgt[v-cStart] = PetscMax(dof, 1);
1600     }
1601   } else {
1602     for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1603   }
1604   wgtflag |= 2; /* have weights on graph vertices */
1605 
1606   if (nparts == 1) {
1607     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1608   } else {
1609     for (p = 0; p < size && !vtxdist[p+1]; ++p); /* find the first rank that owns vertices; test p < size before reading vtxdist[p+1] */
1610     if (vtxdist[p+1] == vtxdist[size]) {
1611       if (rank == p) {
1612         ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
1613         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
1614         options[METIS_OPTION_DBGLVL] = pm->debugFlag;
1615         options[METIS_OPTION_SEED]   = pm_randomSeed;
1616         if (metis_ptype == 1) {
1617           PetscStackPush("METIS_PartGraphRecursive");
1618           ierr = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1619           PetscStackPop;
1620           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
1621         } else {
1622           /*
1623            It would be nice to activate the two options below, but they would need some actual testing.
1624            - Turning these options on may exercise paths of the METIS code that have bugs and may break production runs.
1625            - If CONTIG is set to 1, METIS exits with an error when the graph is disconnected, even though the manual says the option is ignored in that case.
1626           */
1627           /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
1628           /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
1629           PetscStackPush("METIS_PartGraphKway");
1630           ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1631           PetscStackPop;
1632           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1633         }
1634       }
1635     } else {
1636       options[0] = 1; /*use options */
1637       options[1] = pm->debugFlag;
1638       options[2] = (pm_randomSeed == -1) ? 15 : pm_randomSeed; /* default is GLOBAL_SEED=15 from `libparmetis/defs.h` */
1639       PetscStackPush("ParMETIS_V3_PartKway");
1640       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &comm);
1641       PetscStackPop;
1642       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
1643     }
1644   }
1645   /* Convert to PetscSection+IS */
1646   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1647   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1648   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1649   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1650   for (p = 0, i = 0; p < nparts; ++p) {
1651     for (v = 0; v < nvtxs; ++v) {
1652       if (assignment[v] == p) points[i++] = v;
1653     }
1654   }
1655   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1656   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1657   ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
1658   PetscFunctionReturn(0);
1659 #else
1660   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1661 #endif
1662 }
1663 
1664 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1665 {
1666   PetscFunctionBegin;
1667   part->ops->view           = PetscPartitionerView_ParMetis;
1668   part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
1669   part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
1670   part->ops->partition      = PetscPartitionerPartition_ParMetis;
1671   PetscFunctionReturn(0);
1672 }
1673 
1674 /*MC
1675   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1676 
1677   Level: intermediate
1678 
1679 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1680 M*/
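
/* Example (editor's sketch, assuming an existing DMPlex dm): select ParMetis
   programmatically before distributing the mesh.

     PetscPartitioner part;
     DM               dmDist = NULL;

     ierr = DMPlexGetPartitioner(dm, &part);CHKERRQ(ierr);
     ierr = PetscPartitionerSetType(part, PETSCPARTITIONERPARMETIS);CHKERRQ(ierr);
     ierr = DMPlexDistribute(dm, 0, NULL, &dmDist);CHKERRQ(ierr);
*/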
1681 
1682 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1683 {
1684   PetscPartitioner_ParMetis *p;
1685   PetscErrorCode          ierr;
1686 
1687   PetscFunctionBegin;
1688   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1689   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1690   part->data = p;
1691 
1692   p->ptype          = 0;
1693   p->imbalanceRatio = 1.05;
1694   p->debugFlag      = 0;
1695 
1696   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1697   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1698   PetscFunctionReturn(0);
1699 }
1700 
1701 
1702 PetscBool PTScotchPartitionercite = PETSC_FALSE;
1703 const char PTScotchPartitionerCitation[] =
1704   "@article{PTSCOTCH,\n"
1705   "  author  = {C. Chevalier and F. Pellegrini},\n"
1706   "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
1707   "  journal = {Parallel Computing},\n"
1708   "  volume  = {34},\n"
1709   "  number  = {6},\n"
1710   "  pages   = {318--331},\n"
1711   "  year    = {2008},\n"
1712   "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
1713   "}\n";
1714 
1715 typedef struct {
1716   PetscInt  strategy;
1717   PetscReal imbalance;
1718 } PetscPartitioner_PTScotch;
1719 
1720 static const char *const
1721 PTScotchStrategyList[] = {
1722   "DEFAULT",
1723   "QUALITY",
1724   "SPEED",
1725   "BALANCE",
1726   "SAFETY",
1727   "SCALABILITY",
1728   "RECURSIVE",
1729   "REMAP"
1730 };
1731 
1732 #if defined(PETSC_HAVE_PTSCOTCH)
1733 
1734 EXTERN_C_BEGIN
1735 #include <ptscotch.h>
1736 EXTERN_C_END
1737 
1738 #define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0)
1739 
1740 static int PTScotch_Strategy(PetscInt strategy)
1741 {
1742   switch (strategy) {
1743   case  0: return SCOTCH_STRATDEFAULT;
1744   case  1: return SCOTCH_STRATQUALITY;
1745   case  2: return SCOTCH_STRATSPEED;
1746   case  3: return SCOTCH_STRATBALANCE;
1747   case  4: return SCOTCH_STRATSAFETY;
1748   case  5: return SCOTCH_STRATSCALABILITY;
1749   case  6: return SCOTCH_STRATRECURSIVE;
1750   case  7: return SCOTCH_STRATREMAP;
1751   default: return SCOTCH_STRATDEFAULT;
1752   }
1753 }
1754 
1755 
1756 static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1757                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[])
1758 {
1759   SCOTCH_Graph   grafdat;
1760   SCOTCH_Strat   stradat;
1761   SCOTCH_Num     vertnbr = n;
1762   SCOTCH_Num     edgenbr = xadj[n];
1763   SCOTCH_Num*    velotab = vtxwgt;
1764   SCOTCH_Num*    edlotab = adjwgt;
1765   SCOTCH_Num     flagval = strategy;
1766   double         kbalval = imbalance;
1767   PetscErrorCode ierr;
1768 
1769   PetscFunctionBegin;
1770   {
1771     PetscBool flg = PETSC_TRUE;
1772     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
1773     if (!flg) velotab = NULL;
1774   }
1775   ierr = SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr);
1776   ierr = SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
1777   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1778   ierr = SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
1779 #if defined(PETSC_USE_DEBUG)
1780   ierr = SCOTCH_graphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1781 #endif
1782   ierr = SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
1783   SCOTCH_stratExit(&stradat);
1784   SCOTCH_graphExit(&grafdat);
1785   PetscFunctionReturn(0);
1786 }
1787 
1788 static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1789                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[], MPI_Comm comm)
1790 {
1791   PetscMPIInt     procglbnbr;
1792   PetscMPIInt     proclocnum;
1793   SCOTCH_Arch     archdat;
1794   SCOTCH_Dgraph   grafdat;
1795   SCOTCH_Dmapping mappdat;
1796   SCOTCH_Strat    stradat;
1797   SCOTCH_Num      vertlocnbr;
1798   SCOTCH_Num      edgelocnbr;
1799   SCOTCH_Num*     veloloctab = vtxwgt;
1800   SCOTCH_Num*     edloloctab = adjwgt;
1801   SCOTCH_Num      flagval = strategy;
1802   double          kbalval = imbalance;
1803   PetscErrorCode  ierr;
1804 
1805   PetscFunctionBegin;
1806   {
1807     PetscBool flg = PETSC_TRUE;
1808     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
1809     if (!flg) veloloctab = NULL;
1810   }
1811   ierr = MPI_Comm_size(comm, &procglbnbr);CHKERRQ(ierr);
1812   ierr = MPI_Comm_rank(comm, &proclocnum);CHKERRQ(ierr);
1813   vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
1814   edgelocnbr = xadj[vertlocnbr];
1815 
1816   ierr = SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr);
1817   ierr = SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr);
1818 #if defined(PETSC_USE_DEBUG)
1819   ierr = SCOTCH_dgraphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1820 #endif
1821   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1822   ierr = SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRPTSCOTCH(ierr);
1823   ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
1824   ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
1825   ierr = SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr);
1826   ierr = SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr);
1827   SCOTCH_dgraphMapExit(&grafdat, &mappdat);
1828   SCOTCH_archExit(&archdat);
1829   SCOTCH_stratExit(&stradat);
1830   SCOTCH_dgraphExit(&grafdat);
1831   PetscFunctionReturn(0);
1832 }
1833 
1834 #endif /* PETSC_HAVE_PTSCOTCH */
1835 
1836 static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
1837 {
1838   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1839   PetscErrorCode             ierr;
1840 
1841   PetscFunctionBegin;
1842   ierr = PetscFree(p);CHKERRQ(ierr);
1843   PetscFunctionReturn(0);
1844 }
1845 
1846 static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer)
1847 {
1848   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1849   PetscErrorCode            ierr;
1850 
1851   PetscFunctionBegin;
1852   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1853   ierr = PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);CHKERRQ(ierr);
1854   ierr = PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n",(double)p->imbalance);CHKERRQ(ierr);
1855   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1856   PetscFunctionReturn(0);
1857 }
1858 
1859 static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
1860 {
1861   PetscBool      iascii;
1862   PetscErrorCode ierr;
1863 
1864   PetscFunctionBegin;
1865   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1866   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1867   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1868   if (iascii) {ierr = PetscPartitionerView_PTScotch_Ascii(part, viewer);CHKERRQ(ierr);}
1869   PetscFunctionReturn(0);
1870 }
1871 
1872 static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1873 {
1874   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1875   const char *const         *slist = PTScotchStrategyList;
1876   PetscInt                  nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0]));
1877   PetscBool                 flag;
1878   PetscErrorCode            ierr;
1879 
1880   PetscFunctionBegin;
1881   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");CHKERRQ(ierr);
1882   ierr = PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);CHKERRQ(ierr);
1883   ierr = PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);CHKERRQ(ierr);
1884   ierr = PetscOptionsTail();CHKERRQ(ierr);
1885   PetscFunctionReturn(0);
1886 }
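
/* For example (editor's note; "./app" is a placeholder executable name), a run
   can request PT-Scotch with a specific strategy and tolerance via

     ./app -petscpartitioner_type ptscotch
           -petscpartitioner_ptscotch_strategy quality
           -petscpartitioner_ptscotch_imbalance 0.05

   with the strategy names taken from PTScotchStrategyList above. */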
1887 
1888 static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1889 {
1890 #if defined(PETSC_HAVE_PTSCOTCH)
1891   MPI_Comm       comm       = PetscObjectComm((PetscObject)part);
1892   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1893   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1894   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1895   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1896   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1897   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1898   PetscInt       v, i, *assignment, *points;
1899   PetscMPIInt    size, rank, p;
1900   PetscErrorCode ierr;
1901 
1902   PetscFunctionBegin;
1903   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1904   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1905   ierr = PetscMalloc2(size+1,&vtxdist,PetscMax(nvtxs,1),&assignment);CHKERRQ(ierr);
1906 
1907   /* Calculate vertex distribution */
1908   vtxdist[0] = 0;
1909   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1910   for (p = 2; p <= size; ++p) {
1911     vtxdist[p] += vtxdist[p-1];
1912   }
1913 
1914   if (nparts == 1) {
1915     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1916   } else {
1917     PetscSection section;
1918     /* Weight cells by dofs on cell by default */
1919     ierr = PetscMalloc1(PetscMax(nvtxs,1),&vwgt);CHKERRQ(ierr);
1920     ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
1921     if (section) {
1922       PetscInt vStart, vEnd, dof;
1923       ierr = DMPlexGetHeightStratum(dm, 0, &vStart, &vEnd);CHKERRQ(ierr);
1924       for (v = vStart; v < vEnd; ++v) {
1925         ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1926         /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1927         /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1928         if (v-vStart < numVertices) vwgt[v-vStart] = PetscMax(dof, 1);
1929       }
1930     } else {
1931       for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1932     }
1933     {
1934       PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
1935       int                       strat = PTScotch_Strategy(pts->strategy);
1936       double                    imbal = (double)pts->imbalance;
1937 
1938       for (p = 0; p < size && !vtxdist[p+1]; ++p); /* find the first rank that owns vertices; test p < size before reading vtxdist[p+1] */
1939       if (vtxdist[p+1] == vtxdist[size]) {
1940         if (rank == p) {
1941           ierr = PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, assignment);CHKERRQ(ierr);
1942         }
1943       } else {
1944         ierr = PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, assignment, comm);CHKERRQ(ierr);
1945       }
1946     }
1947     ierr = PetscFree(vwgt);CHKERRQ(ierr);
1948   }
1949 
1950   /* Convert to PetscSection+IS */
1951   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1952   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1953   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1954   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1955   for (p = 0, i = 0; p < nparts; ++p) {
1956     for (v = 0; v < nvtxs; ++v) {
1957       if (assignment[v] == p) points[i++] = v;
1958     }
1959   }
1960   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1961   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1962 
1963   ierr = PetscFree2(vtxdist,assignment);CHKERRQ(ierr);
1964   PetscFunctionReturn(0);
1965 #else
1966   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
1967 #endif
1968 }
1969 
1970 static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
1971 {
1972   PetscFunctionBegin;
1973   part->ops->view           = PetscPartitionerView_PTScotch;
1974   part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
1975   part->ops->partition      = PetscPartitionerPartition_PTScotch;
1976   part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
1977   PetscFunctionReturn(0);
1978 }
1979 
1980 /*MC
1981   PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library
1982 
1983   Level: intermediate
1984 
1985 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1986 M*/
1987 
1988 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
1989 {
1990   PetscPartitioner_PTScotch *p;
1991   PetscErrorCode          ierr;
1992 
1993   PetscFunctionBegin;
1994   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1995   ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
1996   part->data = p;
1997 
1998   p->strategy  = 0;
1999   p->imbalance = 0.01;
2000 
2001   ierr = PetscPartitionerInitialize_PTScotch(part);CHKERRQ(ierr);
2002   ierr = PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);CHKERRQ(ierr);
2003   PetscFunctionReturn(0);
2004 }
2005 
2006 
2007 /*@
2008   DMPlexGetPartitioner - Get the mesh partitioner
2009 
2010   Not collective
2011 
2012   Input Parameter:
2013 . dm - The DM
2014 
2015   Output Parameter:
2016 . part - The PetscPartitioner
2017 
2018   Level: developer
2019 
2020   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
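
  A minimal usage sketch (editor's example, assuming an existing DMPlex dm):
.vb
  PetscPartitioner part;
  ierr = DMPlexGetPartitioner(dm, &part);CHKERRQ(ierr);
  ierr = PetscPartitionerSetType(part, PETSCPARTITIONERPARMETIS);CHKERRQ(ierr);
.ve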
2021 
2022 .seealso: DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
2023 @*/
2024 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
2025 {
2026   DM_Plex       *mesh = (DM_Plex *) dm->data;
2027 
2028   PetscFunctionBegin;
2029   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2030   PetscValidPointer(part, 2);
2031   *part = mesh->partitioner;
2032   PetscFunctionReturn(0);
2033 }
2034 
2035 /*@
2036   DMPlexSetPartitioner - Set the mesh partitioner
2037 
2038   Logically collective on dm and part
2039 
2040   Input Parameters:
2041 + dm - The DM
2042 - part - The partitioner
2043 
2044   Level: developer
2045 
2046   Note: Any existing PetscPartitioner will be destroyed.
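
  For example (editor's sketch, assuming an existing DMPlex dm), one can install
  a freshly created partitioner and drop the local reference afterwards, since
  DMPlexSetPartitioner() takes its own reference:
.vb
  PetscPartitioner part;
  ierr = PetscPartitionerCreate(PetscObjectComm((PetscObject) dm), &part);CHKERRQ(ierr);
  ierr = PetscPartitionerSetType(part, PETSCPARTITIONERPTSCOTCH);CHKERRQ(ierr);
  ierr = DMPlexSetPartitioner(dm, part);CHKERRQ(ierr);
  ierr = PetscPartitionerDestroy(&part);CHKERRQ(ierr);
.ve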
2047 
2048 .seealso: DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
2049 @*/
2050 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
2051 {
2052   DM_Plex       *mesh = (DM_Plex *) dm->data;
2053   PetscErrorCode ierr;
2054 
2055   PetscFunctionBegin;
2056   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2057   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
2058   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
2059   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
2060   mesh->partitioner = part;
2061   PetscFunctionReturn(0);
2062 }
2063 
2064 static PetscErrorCode DMPlexAddClosure_Tree(DM dm, PetscHSetI ht, PetscInt point, PetscBool up, PetscBool down)
2065 {
2066   PetscErrorCode ierr;
2067 
2068   PetscFunctionBegin;
2069   if (up) {
2070     PetscInt parent;
2071 
2072     ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
2073     if (parent != point) {
2074       PetscInt closureSize, *closure = NULL, i;
2075 
2076       ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
2077       for (i = 0; i < closureSize; i++) {
2078         PetscInt cpoint = closure[2*i];
2079 
2080         ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
2081         ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
2082       }
2083       ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
2084     }
2085   }
2086   if (down) {
2087     PetscInt numChildren;
2088     const PetscInt *children;
2089 
2090     ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
2091     if (numChildren) {
2092       PetscInt i;
2093 
2094       for (i = 0; i < numChildren; i++) {
2095         PetscInt cpoint = children[i];
2096 
2097         ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
2098         ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
2099       }
2100     }
2101   }
2102   PetscFunctionReturn(0);
2103 }
2104 
2105 PETSC_INTERN PetscErrorCode DMPlexPartitionLabelClosure_Private(DM,DMLabel,PetscInt,PetscInt,const PetscInt[],IS*);
2106 
2107 PetscErrorCode DMPlexPartitionLabelClosure_Private(DM dm, DMLabel label, PetscInt rank, PetscInt numPoints, const PetscInt points[], IS *closureIS)
2108 {
2109   DM_Plex        *mesh = (DM_Plex *)dm->data;
2110   PetscBool      hasTree = (mesh->parentSection || mesh->childSection) ? PETSC_TRUE : PETSC_FALSE;
2111   PetscInt       *closure = NULL, closureSize, nelems, *elems, off = 0, p, c;
2112   PetscHSetI     ht;
2113   PetscErrorCode ierr;
2114 
2115   PetscFunctionBegin;
2116   ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
2117   ierr = PetscHSetIResize(ht, numPoints*16);CHKERRQ(ierr);
2118   for (p = 0; p < numPoints; ++p) {
2119     ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
2120     for (c = 0; c < closureSize*2; c += 2) {
2121       ierr = PetscHSetIAdd(ht, closure[c]);CHKERRQ(ierr);
2122       if (hasTree) {ierr = DMPlexAddClosure_Tree(dm, ht, closure[c], PETSC_TRUE, PETSC_TRUE);CHKERRQ(ierr);}
2123     }
2124   }
2125   if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure);CHKERRQ(ierr);}
2126   ierr = PetscHSetIGetSize(ht, &nelems);CHKERRQ(ierr);
2127   ierr = PetscMalloc1(nelems, &elems);CHKERRQ(ierr);
2128   ierr = PetscHSetIGetElems(ht, &off, elems);CHKERRQ(ierr);
2129   ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
2130   ierr = PetscSortInt(nelems, elems);CHKERRQ(ierr);
2131   ierr = ISCreateGeneral(PETSC_COMM_SELF, nelems, elems, PETSC_OWN_POINTER, closureIS);CHKERRQ(ierr);
2132   PetscFunctionReturn(0);
2133 }
2134 
2135 /*@
2136   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
2137 
2138   Input Parameters:
2139 + dm     - The DM
2140 - label  - DMLabel assigning ranks to remote roots
2141 
2142   Level: developer
2143 
2144 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2145 @*/
2146 PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
2147 {
2148   IS              rankIS,   pointIS, closureIS;
2149   const PetscInt *ranks,   *points;
2150   PetscInt        numRanks, numPoints, r;
2151   PetscErrorCode  ierr;
2152 
2153   PetscFunctionBegin;
2154   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2155   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2156   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2157   for (r = 0; r < numRanks; ++r) {
2158     const PetscInt rank = ranks[r];
2159     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2160     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2161     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2162     ierr = DMPlexPartitionLabelClosure_Private(dm, label, rank, numPoints, points, &closureIS);CHKERRQ(ierr);
2163     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2164     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2165     ierr = DMLabelSetStratumIS(label, rank, closureIS);CHKERRQ(ierr);
2166     ierr = ISDestroy(&closureIS);CHKERRQ(ierr);
2167   }
2168   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2169   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2170   PetscFunctionReturn(0);
2171 }
2172 
2173 /*@
2174   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
2175 
2176   Input Parameters:
2177 + dm     - The DM
2178 - label  - DMLabel assinging ranks to remote roots
2179 
2180   Level: developer
2181 
2182 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2183 @*/
2184 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
2185 {
2186   IS              rankIS,   pointIS;
2187   const PetscInt *ranks,   *points;
2188   PetscInt        numRanks, numPoints, r, p, a, adjSize;
2189   PetscInt       *adj = NULL;
2190   PetscErrorCode  ierr;
2191 
2192   PetscFunctionBegin;
2193   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2194   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2195   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2196   for (r = 0; r < numRanks; ++r) {
2197     const PetscInt rank = ranks[r];
2198 
2199     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2200     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2201     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2202     for (p = 0; p < numPoints; ++p) {
2203       adjSize = PETSC_DETERMINE;
2204       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
2205       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
2206     }
2207     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2208     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2209   }
2210   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2211   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2212   ierr = PetscFree(adj);CHKERRQ(ierr);
2213   PetscFunctionReturn(0);
2214 }
2215 
2216 /*@
2217   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
2218 
2219   Input Parameters:
2220 + dm     - The DM
2221 - label  - DMLabel assigning ranks to remote roots
2222 
2223   Level: developer
2224 
2225   Note: This is required when generating multi-level overlaps to capture
2226   overlap points from non-neighbouring partitions.
2227 
2228 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2229 @*/
2230 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
2231 {
2232   MPI_Comm        comm;
2233   PetscMPIInt     rank;
2234   PetscSF         sfPoint;
2235   DMLabel         lblRoots, lblLeaves;
2236   IS              rankIS, pointIS;
2237   const PetscInt *ranks;
2238   PetscInt        numRanks, r;
2239   PetscErrorCode  ierr;
2240 
2241   PetscFunctionBegin;
2242   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2243   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2244   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
2245   /* Pull point contributions from remote leaves into local roots */
2246   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
2247   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
2248   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2249   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2250   for (r = 0; r < numRanks; ++r) {
2251     const PetscInt remoteRank = ranks[r];
2252     if (remoteRank == rank) continue;
2253     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
2254     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2255     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2256   }
2257   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2258   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2259   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
2260   /* Push point contributions from roots into remote leaves */
2261   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
2262   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
2263   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2264   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2265   for (r = 0; r < numRanks; ++r) {
2266     const PetscInt remoteRank = ranks[r];
2267     if (remoteRank == rank) continue;
2268     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
2269     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2270     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2271   }
2272   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2273   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2274   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
2275   PetscFunctionReturn(0);
2276 }
2277 
2278 /*@
2279   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
2280 
2281   Input Parameters:
2282 + dm        - The DM
2283 . rootLabel - DMLabel assigning ranks to local roots
2284 - processSF - A star forest mapping into the local index on each remote rank
2285 
2286   Output Parameter:
2287 . leafLabel - DMLabel assigning ranks to remote roots
2288 
2289   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
2290   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
2291 
2292   Level: developer
2293 
2294 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2295 @*/
2296 PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
2297 {
2298   MPI_Comm           comm;
2299   PetscMPIInt        rank, size, r;
2300   PetscInt           p, n, numNeighbors, numPoints, dof, off, rootSize, l, nleaves, leafSize;
2301   PetscSF            sfPoint;
2302   PetscSection       rootSection;
2303   PetscSFNode       *rootPoints, *leafPoints;
2304   const PetscSFNode *remote;
2305   const PetscInt    *local, *neighbors;
2306   IS                 valueIS;
2307   PetscBool          mpiOverflow = PETSC_FALSE;
2308   PetscErrorCode     ierr;
2309 
2310   PetscFunctionBegin;
2311   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2312   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2313   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2314   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
2315 
2316   /* Convert to (point, rank) and use actual owners */
2317   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
2318   ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
2319   ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
2320   ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
2321   ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
2322   for (n = 0; n < numNeighbors; ++n) {
2323     ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
2324     ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
2325   }
2326   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
2327   ierr = PetscSectionGetStorageSize(rootSection, &rootSize);CHKERRQ(ierr);
2328   ierr = PetscMalloc1(rootSize, &rootPoints);CHKERRQ(ierr);
2329   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
2330   for (n = 0; n < numNeighbors; ++n) {
2331     IS              pointIS;
2332     const PetscInt *points;
2333 
2334     ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
2335     ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
2336     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2337     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2338     for (p = 0; p < numPoints; ++p) {
2339       if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
2340       else       {l = -1;}
2341       if (l >= 0) {rootPoints[off+p] = remote[l];}
2342       else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
2343     }
2344     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2345     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2346   }
2347 
2348   /* Try to communicate overlap using All-to-All */
2349   if (!processSF) {
2350     PetscInt64  counter = 0;
2351     PetscBool   locOverflow = PETSC_FALSE;
2352     PetscMPIInt *scounts, *sdispls, *rcounts, *rdispls;
2353 
2354     ierr = PetscCalloc4(size, &scounts, size, &sdispls, size, &rcounts, size, &rdispls);CHKERRQ(ierr);
2355     for (n = 0; n < numNeighbors; ++n) {
2356       ierr = PetscSectionGetDof(rootSection, neighbors[n], &dof);CHKERRQ(ierr);
2357       ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
2358 #if defined(PETSC_USE_64BIT_INDICES)
2359       if (dof > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
2360       if (off > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
2361 #endif
2362       scounts[neighbors[n]] = (PetscMPIInt) dof;
2363       sdispls[neighbors[n]] = (PetscMPIInt) off;
2364     }
2365     ierr = MPI_Alltoall(scounts, 1, MPI_INT, rcounts, 1, MPI_INT, comm);CHKERRQ(ierr);
2366     for (r = 0; r < size; ++r) { rdispls[r] = (int)counter; counter += rcounts[r]; }
2367     if (counter > PETSC_MPI_INT_MAX) locOverflow = PETSC_TRUE;
2368     ierr = MPI_Allreduce(&locOverflow, &mpiOverflow, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
2369     if (!mpiOverflow) {
2370       leafSize = (PetscInt) counter;
2371       ierr = PetscMalloc1(leafSize, &leafPoints);CHKERRQ(ierr);
2372       ierr = MPI_Alltoallv(rootPoints, scounts, sdispls, MPIU_2INT, leafPoints, rcounts, rdispls, MPIU_2INT, comm);CHKERRQ(ierr);
2373     }
2374     ierr = PetscFree4(scounts, sdispls, rcounts, rdispls);CHKERRQ(ierr);
2375   }
2376 
2377   /* Communicate overlap using process star forest */
2378   if (processSF || mpiOverflow) {
2379     PetscSF      procSF;
2380     PetscSFNode  *remote;
2381     PetscSection leafSection;
2382 
2383     if (processSF) {
2384       ierr = PetscObjectReference((PetscObject)processSF);CHKERRQ(ierr);
2385       procSF = processSF;
2386     } else {
2387       ierr = PetscMalloc1(size, &remote);CHKERRQ(ierr);
2388       for (r = 0; r < size; ++r) { remote[r].rank  = r; remote[r].index = rank; }
2389       ierr = PetscSFCreate(comm, &procSF);CHKERRQ(ierr);
2390       ierr = PetscSFSetGraph(procSF, size, size, NULL, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
2391     }
2392 
2393     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dm), &leafSection);CHKERRQ(ierr);
2394     ierr = DMPlexDistributeData(dm, procSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
2395     ierr = PetscSectionGetStorageSize(leafSection, &leafSize);CHKERRQ(ierr);
2396     ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
2397     ierr = PetscSFDestroy(&procSF);CHKERRQ(ierr);
2398   }
2399 
2400   for (p = 0; p < leafSize; p++) {
2401     ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
2402   }
2403 
2404   ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
2405   ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
2406   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
2407   ierr = PetscFree(rootPoints);CHKERRQ(ierr);
2408   ierr = PetscFree(leafPoints);CHKERRQ(ierr);
2409   PetscFunctionReturn(0);
2410 }
2411 
2412 /*@
2413   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
2414 
2415   Input Parameters:
2416 + dm    - The DM
2417 - label - DMLabel assigning ranks to remote roots
2418 
2419   Output Parameter:
2420 . sf    - The star forest communication context encapsulating the defined mapping
2421 
2422   Note: The incoming label is a receiver mapping of remote points to their parent rank.
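
  A typical flow (editor's sketch; assumes rootLabel and an empty leafLabel
  already exist) pairs this with DMPlexPartitionLabelInvert(), which turns a
  send-side label into the receive-side label consumed here:
.vb
  PetscSF migrationSF;
  ierr = DMPlexPartitionLabelInvert(dm, rootLabel, NULL, leafLabel);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelCreateSF(dm, leafLabel, &migrationSF);CHKERRQ(ierr);
.ve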
2423 
2424   Level: developer
2425 
2426 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
2427 @*/
2428 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
2429 {
2430   PetscMPIInt     rank, size;
2431   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
2432   PetscSFNode    *remotePoints;
2433   IS              remoteRootIS;
2434   const PetscInt *remoteRoots;
2435   PetscErrorCode ierr;
2436 
2437   PetscFunctionBegin;
2438   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
2439   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
2440 
2441   for (numRemote = 0, n = 0; n < size; ++n) {
2442     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
2443     numRemote += numPoints;
2444   }
2445   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
2446   /* Put owned points first */
2447   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
2448   if (numPoints > 0) {
2449     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
2450     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2451     for (p = 0; p < numPoints; p++) {
2452       remotePoints[idx].index = remoteRoots[p];
2453       remotePoints[idx].rank = rank;
2454       idx++;
2455     }
2456     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2457     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
2458   }
2459   /* Now add remote points */
2460   for (n = 0; n < size; ++n) {
2461     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
2462     if (numPoints <= 0 || n == rank) continue;
2463     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
2464     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2465     for (p = 0; p < numPoints; p++) {
2466       remotePoints[idx].index = remoteRoots[p];
2467       remotePoints[idx].rank = n;
2468       idx++;
2469     }
2470     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2471     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
2472   }
2473   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
2474   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
2475   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
2476   PetscFunctionReturn(0);
2477 }
2478