xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 9dcd2ab3d0c1bf9fba43467229cff6a5939ac9e4)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/hashseti.h>
3 
4 PetscClassId PETSCPARTITIONER_CLASSID = 0;
5 
6 PetscFunctionList PetscPartitionerList              = NULL;
7 PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
8 
9 PetscBool ChacoPartitionercite = PETSC_FALSE;
10 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
11                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
12                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
13                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
14                                "  isbn      = {0-89791-816-9},\n"
15                                "  pages     = {28},\n"
16                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
17                                "  publisher = {ACM Press},\n"
18                                "  address   = {New York},\n"
19                                "  year      = {1995}\n}\n";
20 
21 PetscBool ParMetisPartitionercite = PETSC_FALSE;
22 const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
23                                "  author  = {George Karypis and Vipin Kumar},\n"
24                                "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
25                                "  journal = {Journal of Parallel and Distributed Computing},\n"
26                                "  volume  = {48},\n"
27                                "  pages   = {71--85},\n"
28                                "  year    = {1998}\n}\n";
29 
30 PETSC_STATIC_INLINE PetscInt DMPlex_GlobalID(PetscInt point) { return point >= 0 ? point : -(point+1); }
31 
/*@C
  DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner

  Input Parameters:
+ dm      - The mesh DM dm
- height  - Height of the strata from which to construct the graph

  Output Parameters:
+ numVertices     - Number of vertices in the graph
. offsets         - Point offsets in the graph
. adjacency       - Point connectivity in the graph
- globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.

  The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: developer

.seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMSetAdjacency()
@*/
PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
{
  PetscInt       dim, depth, p, pStart, pEnd, a, adjSize, idx, size;
  PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
  IS             cellNumbering;
  const PetscInt *cellNum;
  PetscBool      useCone, useClosure;
  PetscSection   section;
  PetscSegBuffer adjBuffer;
  PetscSF        sfPoint;
  PetscInt       *adjCells = NULL, *remoteCells = NULL;
  const PetscInt *local;
  PetscInt       nroots, nleaves, l;
  PetscMPIInt    rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  if (dim != depth) {
    /* We do not handle the uninterpolated case here */
    ierr = DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);CHKERRQ(ierr);
    /* DMPlexCreateNeighborCSR does not make a numbering */
    if (globalNumbering) {ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);CHKERRQ(ierr);}
    /* Different behavior for empty graphs */
    if (!*numVertices) {
      ierr = PetscMalloc1(1, offsets);CHKERRQ(ierr);
      (*offsets)[0] = 0;
    }
    /* Broken in parallel */
    if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported");
    PetscFunctionReturn(0);
  }
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
  /* Build adjacency graph via a section/segbuffer */
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
  /* Always use FVM adjacency to create partitioner graph; the user's setting is restored below */
  ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
  ierr = DMSetBasicAdjacency(dm, PETSC_TRUE, PETSC_FALSE);CHKERRQ(ierr);
  ierr = DMPlexCreateNumbering_Internal(dm, pStart, pEnd, 0, NULL, sfPoint, &cellNumbering);CHKERRQ(ierr);
  if (globalNumbering) {
    /* The caller gets its own reference; we still destroy our reference at the end */
    ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
    *globalNumbering = cellNumbering;
  }
  ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  /* For all boundary faces (including faces adjacent to a ghost cell), record the local cell in adjCells
     Broadcast adjCells to remoteCells (to get cells from roots) and Reduce adjCells to remoteCells (to get cells from leaves)
   */
  ierr = PetscSFGetGraph(sfPoint, &nroots, &nleaves, &local, NULL);CHKERRQ(ierr);
  if (nroots >= 0) {
    PetscInt fStart, fEnd, f;

    ierr = PetscCalloc2(nroots, &adjCells, nroots, &remoteCells);CHKERRQ(ierr);
    ierr = DMPlexGetHeightStratum(dm, height+1, &fStart, &fEnd);CHKERRQ(ierr);
    /* -3 is a sentinel meaning "no cell recorded"; valid entries are global IDs, which are >= 0 */
    for (l = 0; l < nroots; ++l) adjCells[l] = -3;
    for (f = fStart; f < fEnd; ++f) {
      const PetscInt *support;
      PetscInt        supportSize;

      ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
      ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
      /* Boundary face: record its single cell */
      if (supportSize == 1) adjCells[f] = DMPlex_GlobalID(cellNum[support[0]]);
      else if (supportSize == 2) {
        /* Interior face with a leaf (ghost) cell on one side: record the other cell */
        ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = DMPlex_GlobalID(cellNum[support[1]]);
        ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = DMPlex_GlobalID(cellNum[support[0]]);
      }
      /* Handle non-conforming meshes */
      if (supportSize > 2) {
        PetscInt        numChildren, i;
        const PetscInt *children;

        ierr = DMPlexGetTreeChildren(dm, f, &numChildren, &children);CHKERRQ(ierr);
        for (i = 0; i < numChildren; ++i) {
          const PetscInt child = children[i];
          if (fStart <= child && child < fEnd) {
            ierr = DMPlexGetSupport(dm, child, &support);CHKERRQ(ierr);
            ierr = DMPlexGetSupportSize(dm, child, &supportSize);CHKERRQ(ierr);
            if (supportSize == 1) adjCells[child] = DMPlex_GlobalID(cellNum[support[0]]);
            else if (supportSize == 2) {
              ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr);
              if (p >= 0) adjCells[child] = DMPlex_GlobalID(cellNum[support[1]]);
              ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr);
              if (p >= 0) adjCells[child] = DMPlex_GlobalID(cellNum[support[0]]);
            }
          }
        }
      }
    }
    /* -1 sentinel in remoteCells: MPI_MAX below keeps whichever side recorded a real (>= 0) cell */
    for (l = 0; l < nroots; ++l) remoteCells[l] = -1;
    ierr = PetscSFBcastBegin(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
  }
  /* Combine local and global adjacencies */
  for (*numVertices = 0, p = pStart; p < pEnd; p++) {
    /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    /* Add remote cells */
    if (remoteCells) {
      const PetscInt gp = DMPlex_GlobalID(cellNum[p]);
      PetscInt       coneSize, numChildren, c, i;
      const PetscInt *cone, *children;

      ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
      ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
      for (c = 0; c < coneSize; ++c) {
        const PetscInt point = cone[c];
        /* Only add a remote neighbor if one was recorded for this face and it is not ourselves */
        if (remoteCells[point] >= 0 && remoteCells[point] != gp) {
          PetscInt *PETSC_RESTRICT pBuf;
          ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
          ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
          *pBuf = remoteCells[point];
        }
        /* Handle non-conforming meshes */
        ierr = DMPlexGetTreeChildren(dm, point, &numChildren, &children);CHKERRQ(ierr);
        for (i = 0; i < numChildren; ++i) {
          const PetscInt child = children[i];
          if (remoteCells[child] >= 0 && remoteCells[child] != gp) {
            PetscInt *PETSC_RESTRICT pBuf;
            ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
            ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
            *pBuf = remoteCells[child];
          }
        }
      }
    }
    /* Add local cells */
    adjSize = PETSC_DETERMINE;
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      const PetscInt point = adj[a];
      if (point != p && pStart <= point && point < pEnd) {
        PetscInt *PETSC_RESTRICT pBuf;
        ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
        ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
        *pBuf = DMPlex_GlobalID(cellNum[point]);
      }
    }
    (*numVertices)++;
  }
  ierr = PetscFree(adj);CHKERRQ(ierr);
  ierr = PetscFree2(adjCells, remoteCells);CHKERRQ(ierr);
  /* Restore the adjacency definition the user had configured */
  ierr = DMSetBasicAdjacency(dm, useCone, useClosure);CHKERRQ(ierr);

  /* Derive CSR graph from section/segbuffer */
  ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
  for (idx = 0, p = pStart; p < pEnd; p++) {
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
  }
  vOffsets[*numVertices] = size;
  ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);

  if (nroots >= 0) {
    /* Filter out duplicate edges using section/segbuffer; an edge can appear both
       in the local adjacency and in the remote-cell pass above */
    ierr = PetscSectionReset(section);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(section, 0, *numVertices);CHKERRQ(ierr);
    for (p = 0; p < *numVertices; p++) {
      PetscInt start = vOffsets[p], end = vOffsets[p+1];
      PetscInt numEdges = end-start, *PETSC_RESTRICT edges;
      ierr = PetscSortRemoveDupsInt(&numEdges, &graph[start]);CHKERRQ(ierr);
      ierr = PetscSectionSetDof(section, p, numEdges);CHKERRQ(ierr);
      ierr = PetscSegBufferGetInts(adjBuffer, numEdges, &edges);CHKERRQ(ierr);
      ierr = PetscMemcpy(edges, &graph[start], numEdges*sizeof(*edges));CHKERRQ(ierr);
    }
    ierr = PetscFree(vOffsets);CHKERRQ(ierr);
    ierr = PetscFree(graph);CHKERRQ(ierr);
    /* Derive CSR graph from section/segbuffer (second pass, now without duplicates) */
    ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
    ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
    ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
    for (idx = 0, p = 0; p < *numVertices; p++) {
      ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
    }
    vOffsets[*numVertices] = size;
    ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
  } else {
    /* Sort adjacencies (not strictly necessary) */
    for (p = 0; p < *numVertices; p++) {
      PetscInt start = vOffsets[p], end = vOffsets[p+1];
      ierr = PetscSortInt(end-start, &graph[start]);CHKERRQ(ierr);
    }
  }

  if (offsets) *offsets = vOffsets;
  if (adjacency) *adjacency = graph;

  /* Cleanup */
  ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
  ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
255 
256 /*@C
257   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
258 
259   Collective
260 
261   Input Arguments:
262 + dm - The DMPlex
263 - cellHeight - The height of mesh points to treat as cells (default should be 0)
264 
265   Output Arguments:
266 + numVertices - The number of local vertices in the graph, or cells in the mesh.
267 . offsets     - The offset to the adjacency list for each cell
268 - adjacency   - The adjacency list for all cells
269 
270   Note: This is suitable for input to a mesh partitioner like ParMetis.
271 
272   Level: advanced
273 
274 .seealso: DMPlexCreate()
275 @*/
276 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
277 {
278   const PetscInt maxFaceCases = 30;
279   PetscInt       numFaceCases = 0;
280   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
281   PetscInt      *off, *adj;
282   PetscInt      *neighborCells = NULL;
283   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
284   PetscErrorCode ierr;
285 
286   PetscFunctionBegin;
287   /* For parallel partitioning, I think you have to communicate supports */
288   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
289   cellDim = dim - cellHeight;
290   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
291   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
292   if (cEnd - cStart == 0) {
293     if (numVertices) *numVertices = 0;
294     if (offsets)   *offsets   = NULL;
295     if (adjacency) *adjacency = NULL;
296     PetscFunctionReturn(0);
297   }
298   numCells  = cEnd - cStart;
299   faceDepth = depth - cellHeight;
300   if (dim == depth) {
301     PetscInt f, fStart, fEnd;
302 
303     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
304     /* Count neighboring cells */
305     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
306     for (f = fStart; f < fEnd; ++f) {
307       const PetscInt *support;
308       PetscInt        supportSize;
309       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
310       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
311       if (supportSize == 2) {
312         PetscInt numChildren;
313 
314         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
315         if (!numChildren) {
316           ++off[support[0]-cStart+1];
317           ++off[support[1]-cStart+1];
318         }
319       }
320     }
321     /* Prefix sum */
322     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
323     if (adjacency) {
324       PetscInt *tmp;
325 
326       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
327       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
328       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
329       /* Get neighboring cells */
330       for (f = fStart; f < fEnd; ++f) {
331         const PetscInt *support;
332         PetscInt        supportSize;
333         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
334         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
335         if (supportSize == 2) {
336           PetscInt numChildren;
337 
338           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
339           if (!numChildren) {
340             adj[tmp[support[0]-cStart]++] = support[1];
341             adj[tmp[support[1]-cStart]++] = support[0];
342           }
343         }
344       }
345 #if defined(PETSC_USE_DEBUG)
346       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
347 #endif
348       ierr = PetscFree(tmp);CHKERRQ(ierr);
349     }
350     if (numVertices) *numVertices = numCells;
351     if (offsets)   *offsets   = off;
352     if (adjacency) *adjacency = adj;
353     PetscFunctionReturn(0);
354   }
355   /* Setup face recognition */
356   if (faceDepth == 1) {
357     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
358 
359     for (c = cStart; c < cEnd; ++c) {
360       PetscInt corners;
361 
362       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
363       if (!cornersSeen[corners]) {
364         PetscInt nFV;
365 
366         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
367         cornersSeen[corners] = 1;
368 
369         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
370 
371         numFaceVertices[numFaceCases++] = nFV;
372       }
373     }
374   }
375   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
376   /* Count neighboring cells */
377   for (cell = cStart; cell < cEnd; ++cell) {
378     PetscInt numNeighbors = PETSC_DETERMINE, n;
379 
380     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
381     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
382     for (n = 0; n < numNeighbors; ++n) {
383       PetscInt        cellPair[2];
384       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
385       PetscInt        meetSize = 0;
386       const PetscInt *meet    = NULL;
387 
388       cellPair[0] = cell; cellPair[1] = neighborCells[n];
389       if (cellPair[0] == cellPair[1]) continue;
390       if (!found) {
391         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
392         if (meetSize) {
393           PetscInt f;
394 
395           for (f = 0; f < numFaceCases; ++f) {
396             if (numFaceVertices[f] == meetSize) {
397               found = PETSC_TRUE;
398               break;
399             }
400           }
401         }
402         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
403       }
404       if (found) ++off[cell-cStart+1];
405     }
406   }
407   /* Prefix sum */
408   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
409 
410   if (adjacency) {
411     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
412     /* Get neighboring cells */
413     for (cell = cStart; cell < cEnd; ++cell) {
414       PetscInt numNeighbors = PETSC_DETERMINE, n;
415       PetscInt cellOffset   = 0;
416 
417       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
418       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
419       for (n = 0; n < numNeighbors; ++n) {
420         PetscInt        cellPair[2];
421         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
422         PetscInt        meetSize = 0;
423         const PetscInt *meet    = NULL;
424 
425         cellPair[0] = cell; cellPair[1] = neighborCells[n];
426         if (cellPair[0] == cellPair[1]) continue;
427         if (!found) {
428           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
429           if (meetSize) {
430             PetscInt f;
431 
432             for (f = 0; f < numFaceCases; ++f) {
433               if (numFaceVertices[f] == meetSize) {
434                 found = PETSC_TRUE;
435                 break;
436               }
437             }
438           }
439           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
440         }
441         if (found) {
442           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
443           ++cellOffset;
444         }
445       }
446     }
447   }
448   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
449   if (numVertices) *numVertices = numCells;
450   if (offsets)   *offsets   = off;
451   if (adjacency) *adjacency = adj;
452   PetscFunctionReturn(0);
453 }
454 
/*@C
  PetscPartitionerRegister - Adds a new PetscPartitioner implementation

  Not Collective

  Input Parameters:
+ name        - The name of a new user-defined creation routine
- create_func - The creation routine itself

  Notes:
  PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners

  Sample usage:
.vb
    PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
.ve

  Then, your PetscPartitioner type can be chosen with the procedural interface via
.vb
    PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
    PetscPartitionerSetType(PetscPartitioner, "my_part");
.ve
   or at runtime via the option
.vb
    -petscpartitioner_type my_part
.ve

  Level: advanced

.keywords: PetscPartitioner, register
.seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()

@*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Append the constructor to the global type registry consulted by PetscPartitionerSetType() */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
496 
497 /*@C
498   PetscPartitionerSetType - Builds a particular PetscPartitioner
499 
500   Collective on PetscPartitioner
501 
502   Input Parameters:
503 + part - The PetscPartitioner object
504 - name - The kind of partitioner
505 
506   Options Database Key:
507 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
508 
509   Level: intermediate
510 
511 .keywords: PetscPartitioner, set, type
512 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
513 @*/
514 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
515 {
516   PetscErrorCode (*r)(PetscPartitioner);
517   PetscBool      match;
518   PetscErrorCode ierr;
519 
520   PetscFunctionBegin;
521   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
522   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
523   if (match) PetscFunctionReturn(0);
524 
525   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
526   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
527   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
528 
529   if (part->ops->destroy) {
530     ierr = (*part->ops->destroy)(part);CHKERRQ(ierr);
531   }
532   part->noGraph = PETSC_FALSE;
533   ierr = PetscMemzero(part->ops, sizeof(struct _PetscPartitionerOps));CHKERRQ(ierr);
534   ierr = (*r)(part);CHKERRQ(ierr);
535   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
536   PetscFunctionReturn(0);
537 }
538 
/*@C
  PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.

  Not Collective

  Input Parameter:
. part - The PetscPartitioner

  Output Parameter:
. name - The PetscPartitioner type name

  Level: intermediate

.keywords: PetscPartitioner, get, type, name
.seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  /* Returns a borrowed pointer to the object's type string; the caller must not free it */
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
566 
567 /*@C
568   PetscPartitionerView - Views a PetscPartitioner
569 
570   Collective on PetscPartitioner
571 
572   Input Parameter:
573 + part - the PetscPartitioner object to view
574 - v    - the viewer
575 
576   Level: developer
577 
578 .seealso: PetscPartitionerDestroy()
579 @*/
580 PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
581 {
582   PetscMPIInt    size;
583   PetscBool      isascii;
584   PetscErrorCode ierr;
585 
586   PetscFunctionBegin;
587   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
588   if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
589   ierr = PetscObjectTypeCompare((PetscObject) v, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
590   if (isascii) {
591     ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
592     ierr = PetscViewerASCIIPrintf(v, "Graph Partitioner: %d MPI Process%s\n", size, size > 1 ? "es" : "");CHKERRQ(ierr);
593     ierr = PetscViewerASCIIPrintf(v, "  type: %s\n", part->hdr.type_name);CHKERRQ(ierr);
594     ierr = PetscViewerASCIIPushTab(v);CHKERRQ(ierr);
595     ierr = PetscViewerASCIIPrintf(v, "edge cut: %D\n", part->edgeCut);CHKERRQ(ierr);
596     ierr = PetscViewerASCIIPrintf(v, "balance:  %.2g\n", part->balance);CHKERRQ(ierr);
597     ierr = PetscViewerASCIIPopTab(v);CHKERRQ(ierr);
598   }
599   if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
600   PetscFunctionReturn(0);
601 }
602 
603 static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType)
604 {
605   PetscFunctionBegin;
606   if (!currentType) {
607 #if defined(PETSC_HAVE_CHACO)
608     *defaultType = PETSCPARTITIONERCHACO;
609 #elif defined(PETSC_HAVE_PARMETIS)
610     *defaultType = PETSCPARTITIONERPARMETIS;
611 #elif defined(PETSC_HAVE_PTSCOTCH)
612     *defaultType = PETSCPARTITIONERPTSCOTCH;
613 #else
614     *defaultType = PETSCPARTITIONERSIMPLE;
615 #endif
616   } else {
617     *defaultType = currentType;
618   }
619   PetscFunctionReturn(0);
620 }
621 
622 /*@
623   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
624 
625   Collective on PetscPartitioner
626 
627   Input Parameter:
628 . part - the PetscPartitioner object to set options for
629 
630   Level: developer
631 
632 .seealso: PetscPartitionerView()
633 @*/
634 PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
635 {
636   const char    *defaultType;
637   char           name[256];
638   PetscBool      flg;
639   PetscErrorCode ierr;
640 
641   PetscFunctionBegin;
642   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
643   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
644   ierr = PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);CHKERRQ(ierr);
645   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
646   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, sizeof(name), &flg);CHKERRQ(ierr);
647   if (flg) {
648     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
649   } else if (!((PetscObject) part)->type_name) {
650     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
651   }
652   if (part->ops->setfromoptions) {
653     ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);
654   }
655   ierr = PetscViewerDestroy(&part->viewerGraph);CHKERRQ(ierr);
656   ierr = PetscOptionsGetViewer(((PetscObject) part)->comm, ((PetscObject) part)->options, ((PetscObject) part)->prefix, "-petscpartitioner_view_graph", &part->viewerGraph, &part->formatGraph, &part->viewGraph);CHKERRQ(ierr);
657   /* process any options handlers added with PetscObjectAddOptionsHandler() */
658   ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
659   ierr = PetscOptionsEnd();CHKERRQ(ierr);
660   PetscFunctionReturn(0);
661 }
662 
/*@C
  PetscPartitionerSetUp - Construct data structures for the PetscPartitioner

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to setup

  Level: developer

.seealso: PetscPartitionerView(), PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Setup is optional for implementations: a no-op if the type provides no setup routine */
  if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
684 
685 /*@
686   PetscPartitionerDestroy - Destroys a PetscPartitioner object
687 
688   Collective on PetscPartitioner
689 
690   Input Parameter:
691 . part - the PetscPartitioner object to destroy
692 
693   Level: developer
694 
695 .seealso: PetscPartitionerView()
696 @*/
697 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
698 {
699   PetscErrorCode ierr;
700 
701   PetscFunctionBegin;
702   if (!*part) PetscFunctionReturn(0);
703   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
704 
705   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
706   ((PetscObject) (*part))->refct = 0;
707 
708   ierr = PetscViewerDestroy(&(*part)->viewerGraph);CHKERRQ(ierr);
709   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
710   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
711   PetscFunctionReturn(0);
712 }
713 
714 /*@
715   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
716 
717   Collective on MPI_Comm
718 
719   Input Parameter:
720 . comm - The communicator for the PetscPartitioner object
721 
722   Output Parameter:
723 . part - The PetscPartitioner object
724 
725   Level: beginner
726 
727 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
728 @*/
729 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
730 {
731   PetscPartitioner p;
732   const char       *partitionerType = NULL;
733   PetscErrorCode   ierr;
734 
735   PetscFunctionBegin;
736   PetscValidPointer(part, 2);
737   *part = NULL;
738   ierr = DMInitializePackage();CHKERRQ(ierr);
739 
740   ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
741   ierr = PetscPartitionerGetDefaultType(NULL,&partitionerType);CHKERRQ(ierr);
742   ierr = PetscPartitionerSetType(p,partitionerType);CHKERRQ(ierr);
743 
744   p->edgeCut = 0;
745   p->balance = 0.0;
746 
747   *part = p;
748   PetscFunctionReturn(0);
749 }
750 
751 /*@
752   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
753 
754   Collective on DM
755 
756   Input Parameters:
757 + part    - The PetscPartitioner
758 - dm      - The mesh DM
759 
760   Output Parameters:
761 + partSection     - The PetscSection giving the division of points by partition
762 - partition       - The list of points by partition
763 
764   Options Database:
765 . -petscpartitioner_view_graph - View the graph we are partitioning
766 
767   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
768 
769   Level: developer
770 
771 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
772 @*/
773 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
774 {
775   PetscMPIInt    size;
776   PetscErrorCode ierr;
777 
778   PetscFunctionBegin;
779   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
780   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
781   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
782   PetscValidPointer(partition, 5);
783   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
784   if (size == 1) {
785     PetscInt *points;
786     PetscInt  cStart, cEnd, c;
787 
788     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
789     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
790     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
791     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
792     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
793     for (c = cStart; c < cEnd; ++c) points[c] = c;
794     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
795     PetscFunctionReturn(0);
796   }
797   if (part->height == 0) {
798     PetscInt numVertices = 0;
799     PetscInt *start     = NULL;
800     PetscInt *adjacency = NULL;
801     IS       globalNumbering;
802 
803     if (!part->noGraph || part->viewGraph) {
804       ierr = DMPlexCreatePartitionerGraph(dm, part->height, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
805     } else {
806       const PetscInt *idxs;
807       PetscInt       p, pStart, pEnd;
808 
809       ierr = DMPlexGetHeightStratum(dm, part->height, &pStart, &pEnd);CHKERRQ(ierr);
810       ierr = DMPlexCreateNumbering_Internal(dm, pStart, pEnd, 0, NULL, dm->sf, &globalNumbering);CHKERRQ(ierr);
811       ierr = ISGetIndices(globalNumbering, &idxs);CHKERRQ(ierr);
812       for (p = 0; p < pEnd - pStart; p++) numVertices += idxs[p] < 0 ? 0 : 1;
813       ierr = ISRestoreIndices(globalNumbering, &idxs);CHKERRQ(ierr);
814     }
815     if (part->viewGraph) {
816       PetscViewer viewer = part->viewerGraph;
817       PetscBool   isascii;
818       PetscInt    v, i;
819       PetscMPIInt rank;
820 
821       ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) viewer), &rank);CHKERRQ(ierr);
822       ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
823       if (isascii) {
824         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
825         ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]Nv: %D\n", rank, numVertices);CHKERRQ(ierr);
826         for (v = 0; v < numVertices; ++v) {
827           const PetscInt s = start[v];
828           const PetscInt e = start[v+1];
829 
830           ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]  ", rank);CHKERRQ(ierr);
831           for (i = s; i < e; ++i) {ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%D ", adjacency[i]);CHKERRQ(ierr);}
832           ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%D-%D)\n", s, e);CHKERRQ(ierr);
833         }
834         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
835         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
836       }
837     }
838     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
839     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
840     ierr = PetscFree(start);CHKERRQ(ierr);
841     ierr = PetscFree(adjacency);CHKERRQ(ierr);
842     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
843       const PetscInt *globalNum;
844       const PetscInt *partIdx;
845       PetscInt *map, cStart, cEnd;
846       PetscInt *adjusted, i, localSize, offset;
847       IS    newPartition;
848 
849       ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
850       ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
851       ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
852       ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
853       ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
854       ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
855       for (i = cStart, offset = 0; i < cEnd; i++) {
856         if (globalNum[i - cStart] >= 0) map[offset++] = i;
857       }
858       for (i = 0; i < localSize; i++) {
859         adjusted[i] = map[partIdx[i]];
860       }
861       ierr = PetscFree(map);CHKERRQ(ierr);
862       ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
863       ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
864       ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
865       ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
866       ierr = ISDestroy(partition);CHKERRQ(ierr);
867       *partition = newPartition;
868     }
869   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
870   ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
871   PetscFunctionReturn(0);
872 }
873 
874 static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
875 {
876   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
877   PetscErrorCode          ierr;
878 
879   PetscFunctionBegin;
880   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
881   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
882   ierr = PetscFree(p);CHKERRQ(ierr);
883   PetscFunctionReturn(0);
884 }
885 
886 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
887 {
888   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
889   PetscErrorCode          ierr;
890 
891   PetscFunctionBegin;
892   if (p->random) {
893     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
894     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
895     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
896   }
897   PetscFunctionReturn(0);
898 }
899 
900 static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
901 {
902   PetscBool      iascii;
903   PetscErrorCode ierr;
904 
905   PetscFunctionBegin;
906   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
907   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
908   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
909   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
910   PetscFunctionReturn(0);
911 }
912 
913 static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
914 {
915   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
916   PetscErrorCode          ierr;
917 
918   PetscFunctionBegin;
919   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner Shell Options");CHKERRQ(ierr);
920   ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
921   ierr = PetscOptionsTail();CHKERRQ(ierr);
922   PetscFunctionReturn(0);
923 }
924 
925 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
926 {
927   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
928   PetscInt                np;
929   PetscErrorCode          ierr;
930 
931   PetscFunctionBegin;
932   if (p->random) {
933     PetscRandom r;
934     PetscInt   *sizes, *points, v, p;
935     PetscMPIInt rank;
936 
937     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
938     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
939     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr);
940     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
941     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
942     for (v = 0; v < numVertices; ++v) {points[v] = v;}
943     for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);}
944     for (v = numVertices-1; v > 0; --v) {
945       PetscReal val;
946       PetscInt  w, tmp;
947 
948       ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));CHKERRQ(ierr);
949       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
950       w    = PetscFloorReal(val);
951       tmp       = points[v];
952       points[v] = points[w];
953       points[w] = tmp;
954     }
955     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
956     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
957     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
958   }
959   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
960   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
961   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
962   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
963   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
964   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
965   *partition = p->partition;
966   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
967   PetscFunctionReturn(0);
968 }
969 
970 static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
971 {
972   PetscFunctionBegin;
973   part->noGraph             = PETSC_TRUE; /* PetscPartitionerShell cannot overload the partition call, so it is safe for now */
974   part->ops->view           = PetscPartitionerView_Shell;
975   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
976   part->ops->destroy        = PetscPartitionerDestroy_Shell;
977   part->ops->partition      = PetscPartitionerPartition_Shell;
978   PetscFunctionReturn(0);
979 }
980 
981 /*MC
982   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
983 
984   Level: intermediate
985 
986 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
987 M*/
988 
989 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
990 {
991   PetscPartitioner_Shell *p;
992   PetscErrorCode          ierr;
993 
994   PetscFunctionBegin;
995   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
996   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
997   part->data = p;
998 
999   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
1000   p->random = PETSC_FALSE;
1001   PetscFunctionReturn(0);
1002 }
1003 
1004 /*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
1006 
1007   Collective on Part
1008 
1009   Input Parameters:
1010 + part   - The PetscPartitioner
1011 . size   - The number of partitions
1012 . sizes  - array of size size (or NULL) providing the number of points in each partition
1013 - points - array of size sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)
1014 
1015   Level: developer
1016 
1017   Notes:
1018 
1019     It is safe to free the sizes and points arrays after use in this routine.
1020 
1021 .seealso DMPlexDistribute(), PetscPartitionerCreate()
1022 @*/
1023 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
1024 {
1025   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1026   PetscInt                proc, numPoints;
1027   PetscErrorCode          ierr;
1028 
1029   PetscFunctionBegin;
1030   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1031   if (sizes)  {PetscValidPointer(sizes, 3);}
1032   if (points) {PetscValidPointer(points, 4);}
1033   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
1034   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
1035   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
1036   ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
1037   if (sizes) {
1038     for (proc = 0; proc < size; ++proc) {
1039       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
1040     }
1041   }
1042   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
1043   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
1044   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
1045   PetscFunctionReturn(0);
1046 }
1047 
1048 /*@
1049   PetscPartitionerShellSetRandom - Set the flag to use a random partition
1050 
1051   Collective on Part
1052 
1053   Input Parameters:
1054 + part   - The PetscPartitioner
1055 - random - The flag to use a random partition
1056 
1057   Level: intermediate
1058 
1059 .seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
1060 @*/
1061 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
1062 {
1063   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1064 
1065   PetscFunctionBegin;
1066   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1067   p->random = random;
1068   PetscFunctionReturn(0);
1069 }
1070 
1071 /*@
1072   PetscPartitionerShellGetRandom - get the flag to use a random partition
1073 
1074   Collective on Part
1075 
1076   Input Parameter:
1077 . part   - The PetscPartitioner
1078 
  Output Parameter:
1080 . random - The flag to use a random partition
1081 
1082   Level: intermediate
1083 
1084 .seealso PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
1085 @*/
1086 PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
1087 {
1088   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1089 
1090   PetscFunctionBegin;
1091   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1092   PetscValidPointer(random, 2);
1093   *random = p->random;
1094   PetscFunctionReturn(0);
1095 }
1096 
1097 static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
1098 {
1099   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
1100   PetscErrorCode          ierr;
1101 
1102   PetscFunctionBegin;
1103   ierr = PetscFree(p);CHKERRQ(ierr);
1104   PetscFunctionReturn(0);
1105 }
1106 
/* ASCII viewer for the Simple partitioner: it has no implementation-specific state to report */
static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1112 
1113 static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
1114 {
1115   PetscBool      iascii;
1116   PetscErrorCode ierr;
1117 
1118   PetscFunctionBegin;
1119   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1120   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1121   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1122   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
1123   PetscFunctionReturn(0);
1124 }
1125 
/* Assign contiguous runs of consecutively numbered vertices to each partition, keeping
   partition sizes balanced to within one vertex. The local IS is always the identity;
   only the section (dofs per partition owned by this rank) varies. */
static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)part);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  /* The partition lists local vertices in order; partSection carries the split */
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial: first (numVertices % nparts) partitions get one extra vertex */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  } else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Prefix-sum the per-rank vertex counts to get this rank's global range [myFirst, myLast] */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      PetscInt rem = nvGlobal % nparts;       /* number of "big" partitions (one extra vertex) */
      PetscInt pSmall = nvGlobal/nparts;      /* size of a small partition */
      PetscInt pBig = nvGlobal/nparts + 1;    /* size of a big partition */


      /* Find the range of partitions [firstPart, lastPart] that overlap this rank's vertex range.
         The first 'rem' partitions have size pBig, the remaining ones size pSmall. */
      if (rem) {
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        } else {
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        } else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      } else {
        /* All partitions the same size */
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      /* Clip each overlapping partition's global range against [myFirst, myLast] */
      for (np = firstPart; np <= lastPart; np++) {
        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1195 
1196 static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1197 {
1198   PetscFunctionBegin;
1199   part->noGraph        = PETSC_TRUE;
1200   part->ops->view      = PetscPartitionerView_Simple;
1201   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1202   part->ops->partition = PetscPartitionerPartition_Simple;
1203   PetscFunctionReturn(0);
1204 }
1205 
1206 /*MC
1207   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1208 
1209   Level: intermediate
1210 
1211 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1212 M*/
1213 
1214 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1215 {
1216   PetscPartitioner_Simple *p;
1217   PetscErrorCode           ierr;
1218 
1219   PetscFunctionBegin;
1220   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1221   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1222   part->data = p;
1223 
1224   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1225   PetscFunctionReturn(0);
1226 }
1227 
1228 static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1229 {
1230   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1231   PetscErrorCode          ierr;
1232 
1233   PetscFunctionBegin;
1234   ierr = PetscFree(p);CHKERRQ(ierr);
1235   PetscFunctionReturn(0);
1236 }
1237 
/* ASCII viewer for the Gather partitioner: it has no implementation-specific state to report */
static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1243 
1244 static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1245 {
1246   PetscBool      iascii;
1247   PetscErrorCode ierr;
1248 
1249   PetscFunctionBegin;
1250   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1251   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1252   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1253   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1254   PetscFunctionReturn(0);
1255 }
1256 
1257 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1258 {
1259   PetscInt       np;
1260   PetscErrorCode ierr;
1261 
1262   PetscFunctionBegin;
1263   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1264   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1265   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1266   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1267   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1268   PetscFunctionReturn(0);
1269 }
1270 
1271 static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1272 {
1273   PetscFunctionBegin;
1274   part->noGraph        = PETSC_TRUE;
1275   part->ops->view      = PetscPartitionerView_Gather;
1276   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1277   part->ops->partition = PetscPartitionerPartition_Gather;
1278   PetscFunctionReturn(0);
1279 }
1280 
1281 /*MC
1282   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1283 
1284   Level: intermediate
1285 
1286 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1287 M*/
1288 
1289 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1290 {
1291   PetscPartitioner_Gather *p;
1292   PetscErrorCode           ierr;
1293 
1294   PetscFunctionBegin;
1295   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1296   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1297   part->data = p;
1298 
1299   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1300   PetscFunctionReturn(0);
1301 }
1302 
1303 
1304 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1305 {
1306   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1307   PetscErrorCode          ierr;
1308 
1309   PetscFunctionBegin;
1310   ierr = PetscFree(p);CHKERRQ(ierr);
1311   PetscFunctionReturn(0);
1312 }
1313 
/* ASCII viewer for the Chaco partitioner: it has no implementation-specific state to report */
static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1319 
1320 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1321 {
1322   PetscBool      iascii;
1323   PetscErrorCode ierr;
1324 
1325   PetscFunctionBegin;
1326   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1327   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1328   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1329   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1330   PetscFunctionReturn(0);
1331 }
1332 
1333 #if defined(PETSC_HAVE_CHACO)
1334 #if defined(PETSC_HAVE_UNISTD_H)
1335 #include <unistd.h>
1336 #endif
1337 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1338 #include <chaco.h>
1339 #else
1340 /* Older versions of Chaco do not have an include file */
1341 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1342                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1343                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1344                        int mesh_dims[3], double *goal, int global_method, int local_method,
1345                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1346 #endif
1347 extern int FREE_GRAPH;
1348 #endif
1349 
/* Partition the (serial) dual graph with the Chaco library and convert the resulting
   per-vertex assignment into a PetscSection + IS pair. Chaco prints diagnostics to stdout,
   which is temporarily redirected into a pipe so errors can be reported through PETSc. */
static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int           *assignment;              /* Output partition */
#else
  short int     *assignment;              /* Output partition */
#endif
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
#if defined (PETSC_USE_DEBUG)
  {
    /* Chaco is a serial library: error out if the graph lives on more than one rank */
    int ival,isum;
    PetscBool distributed;

    ival = (numVertices > 0);
    ierr = MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);CHKERRQ(ierr);
    distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
    if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
  }
#endif
  if (!numVertices) {
    /* Nothing local to partition: produce an empty section and IS */
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Shift adjacency to 1-based indices (presumably Chaco's numbering); undone at the end */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    /* Restore stdout and capture whatever Chaco printed, to include in any error message */
    char msgLog[10000];
    int  count;

    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#else
  if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* List vertices grouped by assigned partition, in partition order */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift applied above */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1470 
1471 static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1472 {
1473   PetscFunctionBegin;
1474   part->noGraph        = PETSC_FALSE;
1475   part->ops->view      = PetscPartitionerView_Chaco;
1476   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1477   part->ops->partition = PetscPartitionerPartition_Chaco;
1478   PetscFunctionReturn(0);
1479 }
1480 
1481 /*MC
1482   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1483 
1484   Level: intermediate
1485 
1486 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1487 M*/
1488 
1489 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1490 {
1491   PetscPartitioner_Chaco *p;
1492   PetscErrorCode          ierr;
1493 
1494   PetscFunctionBegin;
1495   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1496   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1497   part->data = p;
1498 
1499   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1500   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1501   PetscFunctionReturn(0);
1502 }
1503 
/* ParMetis partitioning schemes; index 0 = multilevel k-way, 1 = recursive bisection */
static const char *ptypes[] = {"kway", "rb"};
1505 
1506 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1507 {
1508   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1509   PetscErrorCode             ierr;
1510 
1511   PetscFunctionBegin;
1512   ierr = PetscFree(p);CHKERRQ(ierr);
1513   PetscFunctionReturn(0);
1514 }
1515 
1516 static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1517 {
1518   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1519   PetscErrorCode             ierr;
1520 
1521   PetscFunctionBegin;
1522   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1523   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis type: %s\n", ptypes[p->ptype]);CHKERRQ(ierr);
1524   ierr = PetscViewerASCIIPrintf(viewer, "load imbalance ratio %g\n", (double) p->imbalanceRatio);CHKERRQ(ierr);
1525   ierr = PetscViewerASCIIPrintf(viewer, "debug flag %D\n", p->debugFlag);CHKERRQ(ierr);
1526   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1527   PetscFunctionReturn(0);
1528 }
1529 
1530 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1531 {
1532   PetscBool      iascii;
1533   PetscErrorCode ierr;
1534 
1535   PetscFunctionBegin;
1536   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1537   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1538   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1539   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1540   PetscFunctionReturn(0);
1541 }
1542 
/* Process command-line options for the ParMetis partitioner.

   Note: the random seed read here is deliberately stored only in a local; the
   partition routine (PetscPartitionerPartition_ParMetis) re-queries
   "-petscpartitioner_parmetis_seed" from the options database itself, so this
   call mainly makes the option appear in -help output. */
static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
  PetscInt                  p_randomSeed = -1; /* TODO: Add this to PetscPartitioner_ParMetis */
  PetscErrorCode            ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");CHKERRQ(ierr);
  /* p->ptype indexes the static ptypes[] list: 0 = "kway", 1 = "rb" */
  ierr = PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-petscpartitioner_parmetis_imbalance_ratio", "Load imbalance ratio limit", "", p->imbalanceRatio, &p->imbalanceRatio, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-petscpartitioner_parmetis_debug", "Debugging flag", "", p->debugFlag, &p->debugFlag, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-petscpartitioner_parmetis_seed", "Random seed", "", p_randomSeed, &p_randomSeed, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1558 
1559 #if defined(PETSC_HAVE_PARMETIS)
1560 #include <parmetis.h>
1561 #endif
1562 
1563 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1564 {
1565 #if defined(PETSC_HAVE_PARMETIS)
1566   PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *) part->data;
1567   MPI_Comm       comm;
1568   PetscSection   section;
1569   PetscInt       nvtxs       = numVertices; /* The number of vertices in full graph */
1570   PetscInt      *vtxdist;                   /* Distribution of vertices across processes */
1571   PetscInt      *xadj        = start;       /* Start of edge list for each vertex */
1572   PetscInt      *adjncy      = adjacency;   /* Edge lists for all vertices */
1573   PetscInt      *vwgt        = NULL;        /* Vertex weights */
1574   PetscInt      *adjwgt      = NULL;        /* Edge weights */
1575   PetscInt       wgtflag     = 0;           /* Indicates which weights are present */
1576   PetscInt       numflag     = 0;           /* Indicates initial offset (0 or 1) */
1577   PetscInt       ncon        = 1;           /* The number of weights per vertex */
1578   PetscInt       metis_ptype = pm->ptype;   /* kway or recursive bisection */
1579   real_t        *tpwgts;                    /* The fraction of vertex weights assigned to each partition */
1580   real_t        *ubvec;                     /* The balance intolerance for vertex weights */
1581   PetscInt       options[64];               /* Options */
1582   /* Outputs */
1583   PetscInt       v, i, *assignment, *points;
1584   PetscMPIInt    size, rank, p;
1585   PetscInt       pm_randomSeed = -1;
1586   PetscErrorCode ierr;
1587 
1588   PetscFunctionBegin;
1589   ierr = PetscOptionsGetInt(((PetscObject)part)->options,((PetscObject)part)->prefix,"-petscpartitioner_parmetis_seed", &pm_randomSeed, NULL);CHKERRQ(ierr);
1590   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1591   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1592   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1593   /* Calculate vertex distribution */
1594   ierr = PetscMalloc5(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
1595   vtxdist[0] = 0;
1596   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1597   for (p = 2; p <= size; ++p) {
1598     vtxdist[p] += vtxdist[p-1];
1599   }
1600   /* Calculate partition weights */
1601   for (p = 0; p < nparts; ++p) {
1602     tpwgts[p] = 1.0/nparts;
1603   }
1604   ubvec[0] = pm->imbalanceRatio;
1605   /* Weight cells by dofs on cell by default */
1606   ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
1607   if (section) {
1608     PetscInt cStart, cEnd, dof;
1609 
1610     ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
1611     for (v = cStart; v < cEnd; ++v) {
1612       ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1613       /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1614       /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1615       if (v-cStart < numVertices) vwgt[v-cStart] = PetscMax(dof, 1);
1616     }
1617   } else {
1618     for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1619   }
1620   wgtflag |= 2; /* have weights on graph vertices */
1621 
1622   if (nparts == 1) {
1623     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1624   } else {
1625     for (p = 0; !vtxdist[p+1] && p < size; ++p);
1626     if (vtxdist[p+1] == vtxdist[size]) {
1627       if (rank == p) {
1628         ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
1629         options[METIS_OPTION_DBGLVL] = pm->debugFlag;
1630         options[METIS_OPTION_SEED]   = pm_randomSeed;
1631         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
1632         if (metis_ptype == 1) {
1633           PetscStackPush("METIS_PartGraphRecursive");
1634           ierr = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1635           PetscStackPop;
1636           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
1637         } else {
1638           /*
1639            It would be nice to activate the two options below, but they would need some actual testing.
1640            - Turning on these options may exercise path of the METIS code that have bugs and may break production runs.
1641            - If CONTIG is set to 1, METIS will exit with error if the graph is disconnected, despite the manual saying the option is ignored in such case.
1642           */
1643           /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
1644           /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
1645           PetscStackPush("METIS_PartGraphKway");
1646           ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1647           PetscStackPop;
1648           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1649         }
1650       }
1651     } else {
1652       options[0] = 1; /*use options */
1653       options[1] = pm->debugFlag;
1654       options[2] = (pm_randomSeed == -1) ? 15 : pm_randomSeed; /* default is GLOBAL_SEED=15 from `libparmetis/defs.h` */
1655       PetscStackPush("ParMETIS_V3_PartKway");
1656       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &comm);
1657       PetscStackPop;
1658       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
1659     }
1660   }
1661   /* Convert to PetscSection+IS */
1662   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1663   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1664   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1665   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1666   for (p = 0, i = 0; p < nparts; ++p) {
1667     for (v = 0; v < nvtxs; ++v) {
1668       if (assignment[v] == p) points[i++] = v;
1669     }
1670   }
1671   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1672   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1673   ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
1674   PetscFunctionReturn(0);
1675 #else
1676   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1677 #endif
1678 }
1679 
1680 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1681 {
1682   PetscFunctionBegin;
1683   part->noGraph             = PETSC_FALSE;
1684   part->ops->view           = PetscPartitionerView_ParMetis;
1685   part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
1686   part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
1687   part->ops->partition      = PetscPartitionerPartition_ParMetis;
1688   PetscFunctionReturn(0);
1689 }
1690 
1691 /*MC
1692   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1693 
1694   Level: intermediate
1695 
1696 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1697 M*/
1698 
1699 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1700 {
1701   PetscPartitioner_ParMetis *p;
1702   PetscErrorCode          ierr;
1703 
1704   PetscFunctionBegin;
1705   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1706   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1707   part->data = p;
1708 
1709   p->ptype          = 0;
1710   p->imbalanceRatio = 1.05;
1711   p->debugFlag      = 0;
1712 
1713   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1714   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1715   PetscFunctionReturn(0);
1716 }
1717 
1718 PetscBool PTScotchPartitionercite = PETSC_FALSE;
1719 const char PTScotchPartitionerCitation[] =
1720   "@article{PTSCOTCH,\n"
1721   "  author  = {C. Chevalier and F. Pellegrini},\n"
1722   "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
1723   "  journal = {Parallel Computing},\n"
1724   "  volume  = {34},\n"
1725   "  number  = {6},\n"
1726   "  pages   = {318--331},\n"
1727   "  year    = {2008},\n"
1728   "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
1729   "}\n";
1730 
/* Implementation data for the PT-Scotch partitioner */
typedef struct {
  PetscInt  strategy;  /* Index into PTScotchStrategyList[], mapped to a SCOTCH flag by PTScotch_Strategy() */
  PetscReal imbalance; /* Load imbalance ratio passed to the SCOTCH strategy builders */
} PetscPartitioner_PTScotch;
1735 
/* User-visible names for the PT-Scotch strategies; each index corresponds to
   the case value handled by PTScotch_Strategy() below. */
static const char *const
PTScotchStrategyList[] = {
  "DEFAULT",
  "QUALITY",
  "SPEED",
  "BALANCE",
  "SAFETY",
  "SCALABILITY",
  "RECURSIVE",
  "REMAP"
};
1747 
1748 #if defined(PETSC_HAVE_PTSCOTCH)
1749 
1750 EXTERN_C_BEGIN
1751 #include <ptscotch.h>
1752 EXTERN_C_END
1753 
1754 #define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0)
1755 
1756 static int PTScotch_Strategy(PetscInt strategy)
1757 {
1758   switch (strategy) {
1759   case  0: return SCOTCH_STRATDEFAULT;
1760   case  1: return SCOTCH_STRATQUALITY;
1761   case  2: return SCOTCH_STRATSPEED;
1762   case  3: return SCOTCH_STRATBALANCE;
1763   case  4: return SCOTCH_STRATSAFETY;
1764   case  5: return SCOTCH_STRATSCALABILITY;
1765   case  6: return SCOTCH_STRATRECURSIVE;
1766   case  7: return SCOTCH_STRATREMAP;
1767   default: return SCOTCH_STRATDEFAULT;
1768   }
1769 }
1770 
1771 
1772 static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1773                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[])
1774 {
1775   SCOTCH_Graph   grafdat;
1776   SCOTCH_Strat   stradat;
1777   SCOTCH_Num     vertnbr = n;
1778   SCOTCH_Num     edgenbr = xadj[n];
1779   SCOTCH_Num*    velotab = vtxwgt;
1780   SCOTCH_Num*    edlotab = adjwgt;
1781   SCOTCH_Num     flagval = strategy;
1782   double         kbalval = imbalance;
1783   PetscErrorCode ierr;
1784 
1785   PetscFunctionBegin;
1786   {
1787     PetscBool flg = PETSC_TRUE;
1788     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
1789     if (!flg) velotab = NULL;
1790   }
1791   ierr = SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr);
1792   ierr = SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
1793   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1794   ierr = SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
1795 #if defined(PETSC_USE_DEBUG)
1796   ierr = SCOTCH_graphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1797 #endif
1798   ierr = SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
1799   SCOTCH_stratExit(&stradat);
1800   SCOTCH_graphExit(&grafdat);
1801   PetscFunctionReturn(0);
1802 }
1803 
1804 static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1805                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[], MPI_Comm comm)
1806 {
1807   PetscMPIInt     procglbnbr;
1808   PetscMPIInt     proclocnum;
1809   SCOTCH_Arch     archdat;
1810   SCOTCH_Dgraph   grafdat;
1811   SCOTCH_Dmapping mappdat;
1812   SCOTCH_Strat    stradat;
1813   SCOTCH_Num      vertlocnbr;
1814   SCOTCH_Num      edgelocnbr;
1815   SCOTCH_Num*     veloloctab = vtxwgt;
1816   SCOTCH_Num*     edloloctab = adjwgt;
1817   SCOTCH_Num      flagval = strategy;
1818   double          kbalval = imbalance;
1819   PetscErrorCode  ierr;
1820 
1821   PetscFunctionBegin;
1822   {
1823     PetscBool flg = PETSC_TRUE;
1824     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
1825     if (!flg) veloloctab = NULL;
1826   }
1827   ierr = MPI_Comm_size(comm, &procglbnbr);CHKERRQ(ierr);
1828   ierr = MPI_Comm_rank(comm, &proclocnum);CHKERRQ(ierr);
1829   vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
1830   edgelocnbr = xadj[vertlocnbr];
1831 
1832   ierr = SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr);
1833   ierr = SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr);
1834 #if defined(PETSC_USE_DEBUG)
1835   ierr = SCOTCH_dgraphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1836 #endif
1837   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1838   ierr = SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRQ(ierr);
1839   ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
1840   ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
1841   ierr = SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr);
1842 
1843   ierr = SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr);
1844   SCOTCH_dgraphMapExit(&grafdat, &mappdat);
1845   SCOTCH_archExit(&archdat);
1846   SCOTCH_stratExit(&stradat);
1847   SCOTCH_dgraphExit(&grafdat);
1848   PetscFunctionReturn(0);
1849 }
1850 
1851 #endif /* PETSC_HAVE_PTSCOTCH */
1852 
1853 static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
1854 {
1855   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1856   PetscErrorCode             ierr;
1857 
1858   PetscFunctionBegin;
1859   ierr = PetscFree(p);CHKERRQ(ierr);
1860   PetscFunctionReturn(0);
1861 }
1862 
1863 static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer)
1864 {
1865   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1866   PetscErrorCode            ierr;
1867 
1868   PetscFunctionBegin;
1869   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1870   ierr = PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);CHKERRQ(ierr);
1871   ierr = PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n",(double)p->imbalance);CHKERRQ(ierr);
1872   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1873   PetscFunctionReturn(0);
1874 }
1875 
1876 static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
1877 {
1878   PetscBool      iascii;
1879   PetscErrorCode ierr;
1880 
1881   PetscFunctionBegin;
1882   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1883   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1884   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1885   if (iascii) {ierr = PetscPartitionerView_PTScotch_Ascii(part, viewer);CHKERRQ(ierr);}
1886   PetscFunctionReturn(0);
1887 }
1888 
1889 static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1890 {
1891   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1892   const char *const         *slist = PTScotchStrategyList;
1893   PetscInt                  nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0]));
1894   PetscBool                 flag;
1895   PetscErrorCode            ierr;
1896 
1897   PetscFunctionBegin;
1898   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");CHKERRQ(ierr);
1899   ierr = PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);CHKERRQ(ierr);
1900   ierr = PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);CHKERRQ(ierr);
1901   ierr = PetscOptionsTail();CHKERRQ(ierr);
1902   PetscFunctionReturn(0);
1903 }
1904 
1905 static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1906 {
1907 #if defined(PETSC_HAVE_PTSCOTCH)
1908   MPI_Comm       comm       = PetscObjectComm((PetscObject)part);
1909   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1910   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1911   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1912   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1913   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1914   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1915   PetscInt       v, i, *assignment, *points;
1916   PetscMPIInt    size, rank, p;
1917   PetscErrorCode ierr;
1918 
1919   PetscFunctionBegin;
1920   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1921   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1922   ierr = PetscMalloc2(size+1,&vtxdist,PetscMax(nvtxs,1),&assignment);CHKERRQ(ierr);
1923 
1924   /* Calculate vertex distribution */
1925   vtxdist[0] = 0;
1926   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1927   for (p = 2; p <= size; ++p) {
1928     vtxdist[p] += vtxdist[p-1];
1929   }
1930 
1931   if (nparts == 1) {
1932     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1933   } else {
1934     PetscSection section;
1935     /* Weight cells by dofs on cell by default */
1936     ierr = PetscMalloc1(PetscMax(nvtxs,1),&vwgt);CHKERRQ(ierr);
1937     ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
1938     if (section) {
1939       PetscInt vStart, eEnd, dof;
1940       ierr = DMPlexGetHeightStratum(dm, 0, &vStart, &eEnd);CHKERRQ(ierr);
1941       for (v = vStart; v < eEnd; ++v) {
1942         ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1943         /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1944         /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1945         if (v-vStart < numVertices) vwgt[v-vStart] = PetscMax(dof, 1);
1946       }
1947     } else {
1948       for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1949     }
1950     {
1951       PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
1952       int                       strat = PTScotch_Strategy(pts->strategy);
1953       double                    imbal = (double)pts->imbalance;
1954 
1955       for (p = 0; !vtxdist[p+1] && p < size; ++p);
1956       if (vtxdist[p+1] == vtxdist[size]) {
1957         if (rank == p) {
1958           ierr = PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, assignment);CHKERRQ(ierr);
1959         }
1960       } else {
1961         ierr = PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, assignment, comm);CHKERRQ(ierr);
1962       }
1963     }
1964     ierr = PetscFree(vwgt);CHKERRQ(ierr);
1965   }
1966 
1967   /* Convert to PetscSection+IS */
1968   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1969   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1970   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1971   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1972   for (p = 0, i = 0; p < nparts; ++p) {
1973     for (v = 0; v < nvtxs; ++v) {
1974       if (assignment[v] == p) points[i++] = v;
1975     }
1976   }
1977   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1978   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1979 
1980   ierr = PetscFree2(vtxdist,assignment);CHKERRQ(ierr);
1981   PetscFunctionReturn(0);
1982 #else
1983   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
1984 #endif
1985 }
1986 
1987 static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
1988 {
1989   PetscFunctionBegin;
1990   part->noGraph             = PETSC_FALSE;
1991   part->ops->view           = PetscPartitionerView_PTScotch;
1992   part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
1993   part->ops->partition      = PetscPartitionerPartition_PTScotch;
1994   part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
1995   PetscFunctionReturn(0);
1996 }
1997 
1998 /*MC
1999   PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library
2000 
2001   Level: intermediate
2002 
2003 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
2004 M*/
2005 
2006 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
2007 {
2008   PetscPartitioner_PTScotch *p;
2009   PetscErrorCode          ierr;
2010 
2011   PetscFunctionBegin;
2012   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
2013   ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
2014   part->data = p;
2015 
2016   p->strategy  = 0;
2017   p->imbalance = 0.01;
2018 
2019   ierr = PetscPartitionerInitialize_PTScotch(part);CHKERRQ(ierr);
2020   ierr = PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);CHKERRQ(ierr);
2021   PetscFunctionReturn(0);
2022 }
2023 
2024 
2025 /*@
2026   DMPlexGetPartitioner - Get the mesh partitioner
2027 
2028   Not collective
2029 
2030   Input Parameter:
2031 . dm - The DM
2032 
2033   Output Parameter:
2034 . part - The PetscPartitioner
2035 
2036   Level: developer
2037 
2038   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
2039 
2040 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
2041 @*/
2042 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
2043 {
2044   DM_Plex       *mesh = (DM_Plex *) dm->data;
2045 
2046   PetscFunctionBegin;
2047   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2048   PetscValidPointer(part, 2);
2049   *part = mesh->partitioner;
2050   PetscFunctionReturn(0);
2051 }
2052 
2053 /*@
2054   DMPlexSetPartitioner - Set the mesh partitioner
2055 
2056   logically collective on dm and part
2057 
2058   Input Parameters:
2059 + dm - The DM
2060 - part - The partitioner
2061 
2062   Level: developer
2063 
2064   Note: Any existing PetscPartitioner will be destroyed.
2065 
2066 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
2067 @*/
2068 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
2069 {
2070   DM_Plex       *mesh = (DM_Plex *) dm->data;
2071   PetscErrorCode ierr;
2072 
2073   PetscFunctionBegin;
2074   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2075   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
2076   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
2077   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
2078   mesh->partitioner = part;
2079   PetscFunctionReturn(0);
2080 }
2081 
/* DMPlexAddClosure_Tree - Add to the hash set 'ht' the points reachable from
   'point' through the mesh's reference tree.

   Recursion pattern:
   - up:   if 'point' has a distinct tree parent, insert every point of the
           parent's transitive closure and recurse upward only;
   - down: insert every tree child of 'point' and recurse downward only.
   A direction is never reversed once chosen, so the recursion terminates at
   tree roots (parent == point) and leaves (no children). */
static PetscErrorCode DMPlexAddClosure_Tree(DM dm, PetscHSetI ht, PetscInt point, PetscBool up, PetscBool down)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (up) {
    PetscInt parent;

    ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
    if (parent != point) { /* only recurse when there is a distinct parent */
      PetscInt closureSize, *closure = NULL, i;

      ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
      for (i = 0; i < closureSize; i++) {
        PetscInt cpoint = closure[2*i]; /* closure is stored as (point, orientation) pairs, hence stride 2 */

        ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
        ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
      }
      ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
    }
  }
  if (down) {
    PetscInt numChildren;
    const PetscInt *children;

    ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
    if (numChildren) {
      PetscInt i;

      for (i = 0; i < numChildren; i++) {
        PetscInt cpoint = children[i];

        ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
        ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}
2122 
2123 PETSC_INTERN PetscErrorCode DMPlexPartitionLabelClosure_Private(DM,DMLabel,PetscInt,PetscInt,const PetscInt[],IS*);
2124 
/* DMPlexPartitionLabelClosure_Private - Compute the transitive closure (plus
   the tree closure, when the mesh has parent/child sections) of the given set
   of points, returning the union as a sorted IS on PETSC_COMM_SELF.

   NOTE(review): the 'label' and 'rank' arguments are not used in this body;
   they appear to be kept for interface symmetry with the caller
   (DMPlexPartitionLabelClosure) — confirm before removing. */
PetscErrorCode DMPlexPartitionLabelClosure_Private(DM dm, DMLabel label, PetscInt rank, PetscInt numPoints, const PetscInt points[], IS *closureIS)
{
  DM_Plex        *mesh = (DM_Plex *)dm->data;
  PetscBool      hasTree = (mesh->parentSection || mesh->childSection) ? PETSC_TRUE : PETSC_FALSE;
  PetscInt       *closure = NULL, closureSize, nelems, *elems, off = 0, p, c;
  PetscHSetI     ht;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
  /* Heuristic preallocation: roughly 16 closure points per input point */
  ierr = PetscHSetIResize(ht, numPoints*16);CHKERRQ(ierr);
  for (p = 0; p < numPoints; ++p) {
    ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
    for (c = 0; c < closureSize*2; c += 2) { /* stride 2: closure holds (point, orientation) pairs */
      ierr = PetscHSetIAdd(ht, closure[c]);CHKERRQ(ierr);
      if (hasTree) {ierr = DMPlexAddClosure_Tree(dm, ht, closure[c], PETSC_TRUE, PETSC_TRUE);CHKERRQ(ierr);}
    }
  }
  /* Restore the reused work array once, after the loop (point 0 is a placeholder here) */
  if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure);CHKERRQ(ierr);}
  ierr = PetscHSetIGetSize(ht, &nelems);CHKERRQ(ierr);
  ierr = PetscMalloc1(nelems, &elems);CHKERRQ(ierr);
  ierr = PetscHSetIGetElems(ht, &off, elems);CHKERRQ(ierr);
  ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
  ierr = PetscSortInt(nelems, elems);CHKERRQ(ierr);
  /* The IS takes ownership of elems (PETSC_OWN_POINTER), so it is not freed here */
  ierr = ISCreateGeneral(PETSC_COMM_SELF, nelems, elems, PETSC_OWN_POINTER, closureIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2152 
2153 /*@
2154   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
2155 
2156   Input Parameters:
2157 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2159 
2160   Level: developer
2161 
2162 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2163 @*/
2164 PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
2165 {
2166   IS              rankIS,   pointIS, closureIS;
2167   const PetscInt *ranks,   *points;
2168   PetscInt        numRanks, numPoints, r;
2169   PetscErrorCode  ierr;
2170 
2171   PetscFunctionBegin;
2172   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2173   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2174   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2175   for (r = 0; r < numRanks; ++r) {
2176     const PetscInt rank = ranks[r];
2177     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2178     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2179     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2180     ierr = DMPlexPartitionLabelClosure_Private(dm, label, rank, numPoints, points, &closureIS);CHKERRQ(ierr);
2181     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2182     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2183     ierr = DMLabelSetStratumIS(label, rank, closureIS);CHKERRQ(ierr);
2184     ierr = ISDestroy(&closureIS);CHKERRQ(ierr);
2185   }
2186   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2187   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2188   PetscFunctionReturn(0);
2189 }
2190 
2191 /*@
2192   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
2193 
2194   Input Parameters:
2195 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2197 
2198   Level: developer
2199 
2200 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2201 @*/
2202 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
2203 {
2204   IS              rankIS,   pointIS;
2205   const PetscInt *ranks,   *points;
2206   PetscInt        numRanks, numPoints, r, p, a, adjSize;
2207   PetscInt       *adj = NULL;
2208   PetscErrorCode  ierr;
2209 
2210   PetscFunctionBegin;
2211   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2212   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2213   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2214   for (r = 0; r < numRanks; ++r) {
2215     const PetscInt rank = ranks[r];
2216 
2217     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2218     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2219     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2220     for (p = 0; p < numPoints; ++p) {
2221       adjSize = PETSC_DETERMINE;
2222       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
2223       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
2224     }
2225     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2226     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2227   }
2228   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2229   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2230   ierr = PetscFree(adj);CHKERRQ(ierr);
2231   PetscFunctionReturn(0);
2232 }
2233 
2234 /*@
2235   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
2236 
2237   Input Parameters:
2238 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2240 
2241   Level: developer
2242 
2243   Note: This is required when generating multi-level overlaps to capture
2244   overlap points from non-neighbouring partitions.
2245 
2246 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2247 @*/
2248 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
2249 {
2250   MPI_Comm        comm;
2251   PetscMPIInt     rank;
2252   PetscSF         sfPoint;
2253   DMLabel         lblRoots, lblLeaves;
2254   IS              rankIS, pointIS;
2255   const PetscInt *ranks;
2256   PetscInt        numRanks, r;
2257   PetscErrorCode  ierr;
2258 
2259   PetscFunctionBegin;
2260   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2261   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2262   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
2263   /* Pull point contributions from remote leaves into local roots */
2264   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
2265   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
2266   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2267   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2268   for (r = 0; r < numRanks; ++r) {
2269     const PetscInt remoteRank = ranks[r];
2270     if (remoteRank == rank) continue;
2271     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
2272     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2273     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2274   }
2275   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2276   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2277   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
2278   /* Push point contributions from roots into remote leaves */
2279   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
2280   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
2281   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2282   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2283   for (r = 0; r < numRanks; ++r) {
2284     const PetscInt remoteRank = ranks[r];
2285     if (remoteRank == rank) continue;
2286     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
2287     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2288     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2289   }
2290   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2291   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2292   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
2293   PetscFunctionReturn(0);
2294 }
2295 
2296 /*@
2297   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
2298 
2299   Input Parameters:
2300 + dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots
2306 
2307   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
2308   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
2309 
2310   Level: developer
2311 
2312 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2313 @*/
2314 PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
2315 {
2316   MPI_Comm           comm;
2317   PetscMPIInt        rank, size, r;
2318   PetscInt           p, n, numNeighbors, numPoints, dof, off, rootSize, l, nleaves, leafSize;
2319   PetscSF            sfPoint;
2320   PetscSection       rootSection;
2321   PetscSFNode       *rootPoints, *leafPoints;
2322   const PetscSFNode *remote;
2323   const PetscInt    *local, *neighbors;
2324   IS                 valueIS;
2325   PetscBool          mpiOverflow = PETSC_FALSE;
2326   PetscErrorCode     ierr;
2327 
2328   PetscFunctionBegin;
2329   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2330   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2331   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2332   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
2333 
2334   /* Convert to (point, rank) and use actual owners */
2335   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
2336   ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
2337   ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
2338   ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
2339   ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
2340   for (n = 0; n < numNeighbors; ++n) {
2341     ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
2342     ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
2343   }
2344   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
2345   ierr = PetscSectionGetStorageSize(rootSection, &rootSize);CHKERRQ(ierr);
2346   ierr = PetscMalloc1(rootSize, &rootPoints);CHKERRQ(ierr);
2347   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
2348   for (n = 0; n < numNeighbors; ++n) {
2349     IS              pointIS;
2350     const PetscInt *points;
2351 
2352     ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
2353     ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
2354     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2355     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2356     for (p = 0; p < numPoints; ++p) {
2357       if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
2358       else       {l = -1;}
2359       if (l >= 0) {rootPoints[off+p] = remote[l];}
2360       else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
2361     }
2362     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2363     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2364   }
2365 
2366   /* Try to communicate overlap using All-to-All */
2367   if (!processSF) {
2368     PetscInt64  counter = 0;
2369     PetscBool   locOverflow = PETSC_FALSE;
2370     PetscMPIInt *scounts, *sdispls, *rcounts, *rdispls;
2371 
2372     ierr = PetscCalloc4(size, &scounts, size, &sdispls, size, &rcounts, size, &rdispls);CHKERRQ(ierr);
2373     for (n = 0; n < numNeighbors; ++n) {
2374       ierr = PetscSectionGetDof(rootSection, neighbors[n], &dof);CHKERRQ(ierr);
2375       ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
2376 #if defined(PETSC_USE_64BIT_INDICES)
2377       if (dof > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
2378       if (off > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
2379 #endif
2380       scounts[neighbors[n]] = (PetscMPIInt) dof;
2381       sdispls[neighbors[n]] = (PetscMPIInt) off;
2382     }
2383     ierr = MPI_Alltoall(scounts, 1, MPI_INT, rcounts, 1, MPI_INT, comm);CHKERRQ(ierr);
2384     for (r = 0; r < size; ++r) { rdispls[r] = (int)counter; counter += rcounts[r]; }
2385     if (counter > PETSC_MPI_INT_MAX) locOverflow = PETSC_TRUE;
2386     ierr = MPI_Allreduce(&locOverflow, &mpiOverflow, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
2387     if (!mpiOverflow) {
2388       leafSize = (PetscInt) counter;
2389       ierr = PetscMalloc1(leafSize, &leafPoints);CHKERRQ(ierr);
2390       ierr = MPI_Alltoallv(rootPoints, scounts, sdispls, MPIU_2INT, leafPoints, rcounts, rdispls, MPIU_2INT, comm);CHKERRQ(ierr);
2391     }
2392     ierr = PetscFree4(scounts, sdispls, rcounts, rdispls);CHKERRQ(ierr);
2393   }
2394 
2395   /* Communicate overlap using process star forest */
2396   if (processSF || mpiOverflow) {
2397     PetscSF      procSF;
2398     PetscSFNode  *remote;
2399     PetscSection leafSection;
2400 
2401     if (processSF) {
2402       ierr = PetscObjectReference((PetscObject)processSF);CHKERRQ(ierr);
2403       procSF = processSF;
2404     } else {
2405       ierr = PetscMalloc1(size, &remote);CHKERRQ(ierr);
2406       for (r = 0; r < size; ++r) { remote[r].rank  = r; remote[r].index = rank; }
2407       ierr = PetscSFCreate(comm, &procSF);CHKERRQ(ierr);
2408       ierr = PetscSFSetGraph(procSF, size, size, NULL, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
2409     }
2410 
2411     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dm), &leafSection);CHKERRQ(ierr);
2412     ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
2413     ierr = PetscSectionGetStorageSize(leafSection, &leafSize);CHKERRQ(ierr);
2414     ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
2415     ierr = PetscSFDestroy(&procSF);CHKERRQ(ierr);
2416   }
2417 
2418   for (p = 0; p < leafSize; p++) {
2419     ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
2420   }
2421 
2422   ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
2423   ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
2424   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
2425   ierr = PetscFree(rootPoints);CHKERRQ(ierr);
2426   ierr = PetscFree(leafPoints);CHKERRQ(ierr);
2427   PetscFunctionReturn(0);
2428 }
2429 
2430 /*@
2431   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
2432 
2433   Input Parameters:
2434 + dm    - The DM
- label - DMLabel assigning ranks to remote roots
2436 
2437   Output Parameter:
2438 - sf    - The star forest communication context encapsulating the defined mapping
2439 
2440   Note: The incoming label is a receiver mapping of remote points to their parent rank.
2441 
2442   Level: developer
2443 
2444 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
2445 @*/
2446 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
2447 {
2448   PetscMPIInt     rank, size;
2449   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
2450   PetscSFNode    *remotePoints;
2451   IS              remoteRootIS;
2452   const PetscInt *remoteRoots;
2453   PetscErrorCode ierr;
2454 
2455   PetscFunctionBegin;
2456   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
2457   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
2458 
2459   for (numRemote = 0, n = 0; n < size; ++n) {
2460     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
2461     numRemote += numPoints;
2462   }
2463   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
2464   /* Put owned points first */
2465   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
2466   if (numPoints > 0) {
2467     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
2468     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2469     for (p = 0; p < numPoints; p++) {
2470       remotePoints[idx].index = remoteRoots[p];
2471       remotePoints[idx].rank = rank;
2472       idx++;
2473     }
2474     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2475     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
2476   }
2477   /* Now add remote points */
2478   for (n = 0; n < size; ++n) {
2479     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
2480     if (numPoints <= 0 || n == rank) continue;
2481     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
2482     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2483     for (p = 0; p < numPoints; p++) {
2484       remotePoints[idx].index = remoteRoots[p];
2485       remotePoints[idx].rank = n;
2486       idx++;
2487     }
2488     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2489     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
2490   }
2491   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
2492   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
2493   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
2494   PetscFunctionReturn(0);
2495 }
2496 
2497 /* The two functions below are used by DMPlexRebalanceSharedPoints which errors
2498  * when PETSc is built without ParMETIS. To avoid -Wunused-function, we take
2499  * them out in that case. */
2500 #if defined(PETSC_HAVE_PARMETIS)
2501 /*@C
2502 
2503   DMPlexRewriteSF - Rewrites the ownership of the SF of a DM (in place).
2504 
2505   Input parameters:
  + dm                - The DMPlex object.
  . n                 - The number of points.
  . pointsToRewrite   - The points in the SF whose ownership will change.
  . targetOwners      - New owner for each element in pointsToRewrite.
  - degrees           - Degrees of the points in the SF as obtained by PetscSFComputeDegreeBegin/PetscSFComputeDegreeEnd.
2511 
2512   Level: developer
2513 
2514 @*/
static PetscErrorCode DMPlexRewriteSF(DM dm, PetscInt n, PetscInt *pointsToRewrite, PetscInt *targetOwners, const PetscInt *degrees)
{
  /* NOTE(review): ierr is declared PetscInt rather than PetscErrorCode; the types are
     compatible integers here, but PetscErrorCode is the conventional choice. */
  PetscInt      ierr, pStart, pEnd, i, j, counter, leafCounter, sumDegrees, nroots, nleafs;
  PetscInt     *cumSumDegrees, *newOwners, *newNumbers, *rankOnLeafs, *locationsOfLeafs, *remoteLocalPointOfLeafs, *points, *leafsNew;
  PetscSFNode  *leafLocationsNew;
  const         PetscSFNode *iremote;
  const         PetscInt *ilocal;
  PetscBool    *isLeaf;
  PetscSF       sf;
  MPI_Comm      comm;
  PetscMPIInt   rank, size;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);

  /* Mark which local points are leaves of the point SF, i.e. owned by another rank */
  ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sf, &nroots, &nleafs, &ilocal, &iremote); CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &isLeaf);CHKERRQ(ierr);
  for (i=0; i<pEnd-pStart; i++) {
    isLeaf[i] = PETSC_FALSE;
  }
  /* NOTE(review): assumes ilocal is non-NULL — confirm behavior for SFs with
     contiguous leaf storage, where PetscSFGetGraph may return ilocal == NULL */
  for (i=0; i<nleafs; i++) {
    isLeaf[ilocal[i]-pStart] = PETSC_TRUE;
  }

  /* Prefix sum of root degrees: cumSumDegrees[p] is the offset of point p's
     leaf records in the gathered arrays below */
  ierr = PetscMalloc1(pEnd-pStart+1, &cumSumDegrees);CHKERRQ(ierr);
  cumSumDegrees[0] = 0;
  for (i=1; i<=pEnd-pStart; i++) {
    cumSumDegrees[i] = cumSumDegrees[i-1] + degrees[i-1];
  }
  sumDegrees = cumSumDegrees[pEnd-pStart];
  /* get the location of my leafs (we have sumDegrees many leafs pointing at our roots) */

  ierr = PetscMalloc1(sumDegrees, &locationsOfLeafs);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &rankOnLeafs);CHKERRQ(ierr);
  for (i=0; i<pEnd-pStart; i++) {
    rankOnLeafs[i] = rank;
  }
  /* Gather, per root point, the ranks of all leaves referencing it */
  ierr = PetscSFGatherBegin(sf, MPIU_INT, rankOnLeafs, locationsOfLeafs);CHKERRQ(ierr);
  ierr = PetscSFGatherEnd(sf, MPIU_INT, rankOnLeafs, locationsOfLeafs);CHKERRQ(ierr);
  ierr = PetscFree(rankOnLeafs);CHKERRQ(ierr);

  /* get the remote local points of my leaves */
  ierr = PetscMalloc1(sumDegrees, &remoteLocalPointOfLeafs);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &points);CHKERRQ(ierr);
  for (i=0; i<pEnd-pStart; i++) {
    points[i] = pStart+i;
  }
  /* Gather, per root point, the local numbering each leaf rank uses for it */
  ierr = PetscSFGatherBegin(sf, MPIU_INT, points, remoteLocalPointOfLeafs);CHKERRQ(ierr);
  ierr = PetscSFGatherEnd(sf, MPIU_INT, points, remoteLocalPointOfLeafs);CHKERRQ(ierr);
  ierr = PetscFree(points);CHKERRQ(ierr);
  /* Figure out the new owners of the vertices that are up for grabs and their numbers on the new owners */
  ierr = PetscMalloc1(pEnd-pStart, &newOwners);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &newNumbers);CHKERRQ(ierr);
  for (i=0; i<pEnd-pStart; i++) {
    /* -1 means "ownership unchanged by this rewrite" */
    newOwners[i] = -1;
    newNumbers[i] = -1;
  }
  {
    PetscInt oldNumber, newNumber, oldOwner, newOwner;
    for (i=0; i<n; i++) {
      oldNumber = pointsToRewrite[i];
      newNumber = -1;
      oldOwner = rank;
      newOwner = targetOwners[i];
      if (oldOwner == newOwner) {
        newNumber = oldNumber;
      } else {
        /* Search the gathered leaf records of this point for the target rank,
           so we learn its local number on the new owner */
        for (j=0; j<degrees[oldNumber]; j++) {
          if (locationsOfLeafs[cumSumDegrees[oldNumber]+j] == newOwner) {
            newNumber = remoteLocalPointOfLeafs[cumSumDegrees[oldNumber]+j];
            break;
          }
        }
      }
      /* The new owner must already see the point as a leaf; otherwise we cannot hand it over */
      if (newNumber == -1) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Couldn't find the new owner of vertex.");

      newOwners[oldNumber] = newOwner;
      newNumbers[oldNumber] = newNumber;
    }
  }
  ierr = PetscFree(cumSumDegrees);CHKERRQ(ierr);
  ierr = PetscFree(locationsOfLeafs);CHKERRQ(ierr);
  ierr = PetscFree(remoteLocalPointOfLeafs);CHKERRQ(ierr);

  /* Broadcast the ownership decisions from roots to all leaves (in place) */
  ierr = PetscSFBcastBegin(sf, MPIU_INT, newOwners, newOwners);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf, MPIU_INT, newOwners, newOwners);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf, MPIU_INT, newNumbers, newNumbers);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf, MPIU_INT, newNumbers, newNumbers);CHKERRQ(ierr);

  /* Now count how many leafs we have on each processor. */
  leafCounter=0;
  for (i=0; i<pEnd-pStart; i++) {
    if (newOwners[i] >= 0) {
      /* Rewritten point: it is a leaf here iff someone else now owns it */
      if (newOwners[i] != rank) {
        leafCounter++;
      }
    } else {
      /* Untouched point: keep its previous leaf status */
      if (isLeaf[i]) {
        leafCounter++;
      }
    }
  }

  /* Now set up the new sf by creating the leaf arrays */
  ierr = PetscMalloc1(leafCounter, &leafsNew);CHKERRQ(ierr);
  ierr = PetscMalloc1(leafCounter, &leafLocationsNew);CHKERRQ(ierr);

  leafCounter = 0;
  counter = 0; /* walks the OLD iremote array in step with the old leaves */
  for (i=0; i<pEnd-pStart; i++) {
    if (newOwners[i] >= 0) {
      if (newOwners[i] != rank) {
        /* Point handed to (or kept by) a remote rank: point at its new location */
        leafsNew[leafCounter] = i;
        leafLocationsNew[leafCounter].rank = newOwners[i];
        leafLocationsNew[leafCounter].index = newNumbers[i];
        leafCounter++;
      }
    } else {
      if (isLeaf[i]) {
        /* Unchanged leaf: copy its old remote location */
        leafsNew[leafCounter] = i;
        leafLocationsNew[leafCounter].rank = iremote[counter].rank;
        leafLocationsNew[leafCounter].index = iremote[counter].index;
        leafCounter++;
      }
    }
    if (isLeaf[i]) {
      counter++;
    }
  }

  /* Replace the DM's point SF graph in place; the SF takes ownership of both arrays */
  ierr = PetscSFSetGraph(sf, nroots, leafCounter, leafsNew, PETSC_OWN_POINTER, leafLocationsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscFree(newOwners);CHKERRQ(ierr);
  ierr = PetscFree(newNumbers);CHKERRQ(ierr);
  ierr = PetscFree(isLeaf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2655 
2656 static PetscErrorCode DMPlexViewDistribution(MPI_Comm comm, PetscInt n, PetscInt skip, PetscInt *vtxwgt, PetscInt *part, PetscViewer viewer)
2657 {
2658   PetscInt *distribution, min, max, sum, i, ierr;
2659   PetscMPIInt rank, size;
2660   PetscFunctionBegin;
2661   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2662   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2663   ierr = PetscCalloc1(size, &distribution);CHKERRQ(ierr);
2664   for (i=0; i<n; i++) {
2665     if (part) distribution[part[i]] += vtxwgt[skip*i];
2666     else distribution[rank] += vtxwgt[skip*i];
2667   }
2668   ierr = MPI_Allreduce(MPI_IN_PLACE, distribution, size, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
2669   min = distribution[0];
2670   max = distribution[0];
2671   sum = distribution[0];
2672   for (i=1; i<size; i++) {
2673     if (distribution[i]<min) min=distribution[i];
2674     if (distribution[i]>max) max=distribution[i];
2675     sum += distribution[i];
2676   }
2677   ierr = PetscViewerASCIIPrintf(viewer, "Min: %D, Avg: %D, Max: %D, Balance: %f\n", min, sum/size, max, (max*1.*size)/sum);CHKERRQ(ierr);
2678   ierr = PetscFree(distribution);CHKERRQ(ierr);
2679   PetscFunctionReturn(0);
2680 }
2681 
2682 #endif
2683 
2684 /*@
  DMPlexRebalanceSharedPoints - Redistribute points in the plex that are shared in order to achieve better balancing. This routine updates the PointSF of the DM in place.

  Input parameters:
  + dm               - The DMPlex object.
  . entityDepth      - depth of the entity to balance (0 -> balance vertices).
  . useInitialGuess  - whether to use the current distribution as initial guess (only used by ParMETIS).
  - parallel         - whether to use ParMETIS and do the partition in parallel or whether to gather the graph onto a single process and use METIS.

  Output parameters:
  . success          - whether the graph partitioning was successful or not. If not, try useInitialGuess=PETSC_TRUE and parallel=PETSC_TRUE.
2695 
2696   Level: user
2697 
2698 @*/
2699 
2700 PetscErrorCode DMPlexRebalanceSharedPoints(DM dm, PetscInt entityDepth, PetscBool useInitialGuess, PetscBool parallel, PetscBool *success)
2701 {
2702 #if defined(PETSC_HAVE_PARMETIS)
2703   PetscSF     sf;
2704   PetscInt    ierr, i, j, idx, jdx;
2705   PetscInt    eBegin, eEnd, nroots, nleafs, pStart, pEnd;
2706   const       PetscInt *degrees, *ilocal;
2707   const       PetscSFNode *iremote;
2708   PetscBool   *toBalance, *isLeaf, *isExclusivelyOwned, *isNonExclusivelyOwned;
2709   PetscInt    numExclusivelyOwned, numNonExclusivelyOwned;
2710   PetscMPIInt rank, size;
2711   PetscInt    *globalNumbersOfLocalOwnedVertices, *leafGlobalNumbers;
2712   const       PetscInt *cumSumVertices;
2713   PetscInt    offset, counter;
2714   PetscInt    lenadjncy;
2715   PetscInt    *xadj, *adjncy, *vtxwgt;
2716   PetscInt    lenxadj;
2717   PetscInt    *adjwgt = NULL;
2718   PetscInt    *part, *options;
2719   PetscInt    nparts, wgtflag, numflag, ncon, edgecut;
2720   real_t      *ubvec;
2721   PetscInt    *firstVertices, *renumbering;
2722   PetscInt    failed, failedGlobal;
2723   MPI_Comm    comm;
2724   Mat         A, Apre;
2725   const char *prefix = NULL;
2726   PetscViewer       viewer;
2727   PetscViewerFormat format;
2728   PetscLayout layout;
2729 
2730   PetscFunctionBegin;
2731   if (success) *success = PETSC_FALSE;
2732   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2733   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2734   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2735   if (size==1) PetscFunctionReturn(0);
2736 
2737   ierr = PetscLogEventBegin(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
2738 
2739   ierr = PetscOptionsGetViewer(comm,((PetscObject)dm)->options, prefix,"-dm_rebalance_partition_view",&viewer,&format,NULL);CHKERRQ(ierr);
2740   if (viewer) {
2741     ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr);
2742   }
2743 
2744   /* Figure out all points in the plex that we are interested in balancing. */
2745   ierr = DMPlexGetDepthStratum(dm, entityDepth, &eBegin, &eEnd);CHKERRQ(ierr);
2746   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
2747   ierr = PetscMalloc1(pEnd-pStart, &toBalance);CHKERRQ(ierr);
2748 
2749   for (i=0; i<pEnd-pStart; i++) {
2750     toBalance[i] = (PetscBool)(i-pStart>=eBegin && i-pStart<eEnd);
2751   }
2752 
2753   /* There are three types of points:
2754    * exclusivelyOwned: points that are owned by this process and only seen by this process
2755    * nonExclusivelyOwned: points that are owned by this process but seen by at least another process
2756    * leaf: a point that is seen by this process but owned by a different process
2757    */
2758   ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
2759   ierr = PetscSFGetGraph(sf, &nroots, &nleafs, &ilocal, &iremote); CHKERRQ(ierr);
2760   ierr = PetscMalloc1(pEnd-pStart, &isLeaf);CHKERRQ(ierr);
2761   ierr = PetscMalloc1(pEnd-pStart, &isNonExclusivelyOwned);CHKERRQ(ierr);
2762   ierr = PetscMalloc1(pEnd-pStart, &isExclusivelyOwned);CHKERRQ(ierr);
2763   for (i=0; i<pEnd-pStart; i++) {
2764     isNonExclusivelyOwned[i] = PETSC_FALSE;
2765     isExclusivelyOwned[i] = PETSC_FALSE;
2766     isLeaf[i] = PETSC_FALSE;
2767   }
2768 
2769   /* start by marking all the leafs */
2770   for (i=0; i<nleafs; i++) {
2771     isLeaf[ilocal[i]-pStart] = PETSC_TRUE;
2772   }
2773 
2774   /* for an owned point, we can figure out whether another processor sees it or
2775    * not by calculating its degree */
2776   ierr = PetscSFComputeDegreeBegin(sf, &degrees);CHKERRQ(ierr);
2777   ierr = PetscSFComputeDegreeEnd(sf, &degrees);CHKERRQ(ierr);
2778 
2779   numExclusivelyOwned = 0;
2780   numNonExclusivelyOwned = 0;
2781   for (i=0; i<pEnd-pStart; i++) {
2782     if (toBalance[i]) {
2783       if (degrees[i] > 0) {
2784         isNonExclusivelyOwned[i] = PETSC_TRUE;
2785         numNonExclusivelyOwned += 1;
2786       } else {
2787         if (!isLeaf[i]) {
2788           isExclusivelyOwned[i] = PETSC_TRUE;
2789           numExclusivelyOwned += 1;
2790         }
2791       }
2792     }
2793   }
2794 
2795   /* We are going to build a graph with one vertex per core representing the
2796    * exclusively owned points and then one vertex per nonExclusively owned
2797    * point. */
2798 
2799   ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
2800   ierr = PetscLayoutSetLocalSize(layout, 1 + numNonExclusivelyOwned);CHKERRQ(ierr);
2801   ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
2802   ierr = PetscLayoutGetRanges(layout, &cumSumVertices);CHKERRQ(ierr);
2803 
2804   ierr = PetscMalloc1(pEnd-pStart, &globalNumbersOfLocalOwnedVertices);CHKERRQ(ierr);
2805   offset = cumSumVertices[rank];
2806   counter = 0;
2807   for (i=0; i<pEnd-pStart; i++) {
2808     if (toBalance[i]) {
2809       if (degrees[i] > 0) {
2810         globalNumbersOfLocalOwnedVertices[i] = counter + 1 + offset;
2811         counter++;
2812       }
2813     }
2814   }
2815 
2816   /* send the global numbers of vertices I own to the leafs so that they know to connect to it */
2817   ierr = PetscMalloc1(pEnd-pStart, &leafGlobalNumbers);CHKERRQ(ierr);
2818   ierr = PetscSFBcastBegin(sf, MPIU_INT, globalNumbersOfLocalOwnedVertices, leafGlobalNumbers);CHKERRQ(ierr);
2819   ierr = PetscSFBcastEnd(sf, MPIU_INT, globalNumbersOfLocalOwnedVertices, leafGlobalNumbers);CHKERRQ(ierr);
2820 
2821   /* Now start building the data structure for ParMETIS */
2822 
2823   ierr = MatCreate(comm, &Apre);CHKERRQ(ierr);
2824   ierr = MatSetType(Apre, MATPREALLOCATOR);CHKERRQ(ierr);
2825   ierr = MatSetSizes(Apre, 1+numNonExclusivelyOwned, 1+numNonExclusivelyOwned, cumSumVertices[size], cumSumVertices[size]);CHKERRQ(ierr);
2826   ierr = MatSetUp(Apre);CHKERRQ(ierr);
2827 
2828   for (i=0; i<pEnd-pStart; i++) {
2829     if (toBalance[i]) {
2830       idx = cumSumVertices[rank];
2831       if (isNonExclusivelyOwned[i]) jdx = globalNumbersOfLocalOwnedVertices[i];
2832       else if (isLeaf[i]) jdx = leafGlobalNumbers[i];
2833       else continue;
2834       ierr = MatSetValue(Apre, idx, jdx, 1, INSERT_VALUES);CHKERRQ(ierr);
2835       ierr = MatSetValue(Apre, jdx, idx, 1, INSERT_VALUES);CHKERRQ(ierr);
2836     }
2837   }
2838 
2839   ierr = MatAssemblyBegin(Apre, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2840   ierr = MatAssemblyEnd(Apre, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2841 
2842   ierr = MatCreate(comm, &A);CHKERRQ(ierr);
2843   ierr = MatSetType(A, MATMPIAIJ);CHKERRQ(ierr);
2844   ierr = MatSetSizes(A, 1+numNonExclusivelyOwned, 1+numNonExclusivelyOwned, cumSumVertices[size], cumSumVertices[size]);CHKERRQ(ierr);
2845   ierr = MatPreallocatorPreallocate(Apre, PETSC_FALSE, A);CHKERRQ(ierr);
2846   ierr = MatDestroy(&Apre);CHKERRQ(ierr);
2847 
2848   for (i=0; i<pEnd-pStart; i++) {
2849     if (toBalance[i]) {
2850       idx = cumSumVertices[rank];
2851       if (isNonExclusivelyOwned[i]) jdx = globalNumbersOfLocalOwnedVertices[i];
2852       else if (isLeaf[i]) jdx = leafGlobalNumbers[i];
2853       else continue;
2854       ierr = MatSetValue(A, idx, jdx, 1, INSERT_VALUES);CHKERRQ(ierr);
2855       ierr = MatSetValue(A, jdx, idx, 1, INSERT_VALUES);CHKERRQ(ierr);
2856     }
2857   }
2858 
2859   ierr = MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2860   ierr = MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2861   ierr = PetscFree(leafGlobalNumbers);CHKERRQ(ierr);
2862   ierr = PetscFree(globalNumbersOfLocalOwnedVertices);CHKERRQ(ierr);
2863 
2864   nparts = size;
2865   wgtflag = 2;
2866   numflag = 0;
2867   ncon = 2;
2868   real_t *tpwgts;
2869   ierr = PetscMalloc1(ncon * nparts, &tpwgts);CHKERRQ(ierr);
2870   for (i=0; i<ncon*nparts; i++) {
2871     tpwgts[i] = 1./(nparts);
2872   }
2873 
2874   ierr = PetscMalloc1(ncon, &ubvec);CHKERRQ(ierr);
2875   ubvec[0] = 1.01;
2876   ubvec[1] = 1.01;
2877   lenadjncy = 0;
2878   for (i=0; i<1+numNonExclusivelyOwned; i++) {
2879     PetscInt temp=0;
2880     ierr = MatGetRow(A, cumSumVertices[rank] + i, &temp, NULL, NULL);CHKERRQ(ierr);
2881     lenadjncy += temp;
2882     ierr = MatRestoreRow(A, cumSumVertices[rank] + i, &temp, NULL, NULL);CHKERRQ(ierr);
2883   }
2884   ierr = PetscMalloc1(lenadjncy, &adjncy);CHKERRQ(ierr);
2885   lenxadj = 2 + numNonExclusivelyOwned;
2886   ierr = PetscMalloc1(lenxadj, &xadj);CHKERRQ(ierr);
2887   xadj[0] = 0;
2888   counter = 0;
2889   for (i=0; i<1+numNonExclusivelyOwned; i++) {
2890     PetscInt        temp=0;
2891     const PetscInt *cols;
2892     ierr = MatGetRow(A, cumSumVertices[rank] + i, &temp, &cols, NULL);CHKERRQ(ierr);
2893     ierr = PetscMemcpy(&adjncy[counter], cols, temp*sizeof(PetscInt));CHKERRQ(ierr);
2894     counter += temp;
2895     xadj[i+1] = counter;
2896     ierr = MatRestoreRow(A, cumSumVertices[rank] + i, &temp, &cols, NULL);CHKERRQ(ierr);
2897   }
2898 
2899   ierr = PetscMalloc1(cumSumVertices[rank+1]-cumSumVertices[rank], &part);CHKERRQ(ierr);
2900   ierr = PetscMalloc1(ncon*(1 + numNonExclusivelyOwned), &vtxwgt);CHKERRQ(ierr);
2901   vtxwgt[0] = numExclusivelyOwned;
2902   if (ncon>1) vtxwgt[1] = 1;
2903   for (i=0; i<numNonExclusivelyOwned; i++) {
2904     vtxwgt[ncon*(i+1)] = 1;
2905     if (ncon>1) vtxwgt[ncon*(i+1)+1] = 0;
2906   }
2907 
2908   if (viewer) {
2909     ierr = PetscViewerASCIIPrintf(viewer, "Attempt rebalancing of shared points of depth %D on interface of mesh distribution.\n", entityDepth);CHKERRQ(ierr);
2910     ierr = PetscViewerASCIIPrintf(viewer, "Size of generated auxiliary graph: %D\n", cumSumVertices[size]);CHKERRQ(ierr);
2911   }
2912   if (parallel) {
2913     ierr = PetscMalloc1(4, &options);CHKERRQ(ierr);
2914     options[0] = 1;
2915     options[1] = 0; /* Verbosity */
2916     options[2] = 0; /* Seed */
2917     options[3] = PARMETIS_PSR_COUPLED; /* Seed */
2918     if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using ParMETIS to partition graph.\n");CHKERRQ(ierr); }
2919     if (useInitialGuess) {
2920       if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using current distribution of points as initial guess.\n");CHKERRQ(ierr); }
2921       PetscStackPush("ParMETIS_V3_RefineKway");
2922       ierr = ParMETIS_V3_RefineKway((PetscInt*)cumSumVertices, xadj, adjncy, vtxwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgecut, part, &comm);
2923       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_RefineKway()");
2924       PetscStackPop;
2925     } else {
2926       PetscStackPush("ParMETIS_V3_PartKway");
2927       ierr = ParMETIS_V3_PartKway((PetscInt*)cumSumVertices, xadj, adjncy, vtxwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgecut, part, &comm);
2928       PetscStackPop;
2929       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
2930     }
2931     ierr = PetscFree(options);CHKERRQ(ierr);
2932   } else {
2933     if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using METIS to partition graph.\n");CHKERRQ(ierr); }
2934     Mat As;
2935     PetscInt numRows;
2936     PetscInt *partGlobal;
2937     ierr = MatCreateRedundantMatrix(A, size, MPI_COMM_NULL, MAT_INITIAL_MATRIX, &As);CHKERRQ(ierr);
2938 
2939     PetscInt *numExclusivelyOwnedAll;
2940     ierr = PetscMalloc1(size, &numExclusivelyOwnedAll);CHKERRQ(ierr);
2941     numExclusivelyOwnedAll[rank] = numExclusivelyOwned;
2942     ierr = MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,numExclusivelyOwnedAll,1,MPIU_INT,comm);CHKERRQ(ierr);
2943 
2944     ierr = MatGetSize(As, &numRows, NULL);CHKERRQ(ierr);
2945     ierr = PetscMalloc1(numRows, &partGlobal);CHKERRQ(ierr);
2946     if (!rank) {
2947       PetscInt *adjncy_g, *xadj_g, *vtxwgt_g;
2948       lenadjncy = 0;
2949 
2950       for (i=0; i<numRows; i++) {
2951         PetscInt temp=0;
2952         ierr = MatGetRow(As, i, &temp, NULL, NULL);CHKERRQ(ierr);
2953         lenadjncy += temp;
2954         ierr = MatRestoreRow(As, i, &temp, NULL, NULL);CHKERRQ(ierr);
2955       }
2956       ierr = PetscMalloc1(lenadjncy, &adjncy_g);CHKERRQ(ierr);
2957       lenxadj = 1 + numRows;
2958       ierr = PetscMalloc1(lenxadj, &xadj_g);CHKERRQ(ierr);
2959       xadj_g[0] = 0;
2960       counter = 0;
2961       for (i=0; i<numRows; i++) {
2962         PetscInt        temp=0;
2963         const PetscInt *cols;
2964         ierr = MatGetRow(As, i, &temp, &cols, NULL);CHKERRQ(ierr);
2965         ierr = PetscMemcpy(&adjncy_g[counter], cols, temp*sizeof(PetscInt));CHKERRQ(ierr);
2966         counter += temp;
2967         xadj_g[i+1] = counter;
2968         ierr = MatRestoreRow(As, i, &temp, &cols, NULL);CHKERRQ(ierr);
2969       }
2970       ierr = PetscMalloc1(2*numRows, &vtxwgt_g);CHKERRQ(ierr);
2971       for (i=0; i<size; i++){
2972         vtxwgt_g[ncon*cumSumVertices[i]] = numExclusivelyOwnedAll[i];
2973         if (ncon>1) vtxwgt_g[ncon*cumSumVertices[i]+1] = 1;
2974         for (j=cumSumVertices[i]+1; j<cumSumVertices[i+1]; j++) {
2975           vtxwgt_g[ncon*j] = 1;
2976           if (ncon>1) vtxwgt_g[2*j+1] = 0;
2977         }
2978       }
2979       ierr = PetscMalloc1(64, &options);CHKERRQ(ierr);
2980       ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
2981       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
2982       options[METIS_OPTION_CONTIG] = 1;
2983       PetscStackPush("METIS_PartGraphKway");
2984       ierr = METIS_PartGraphKway(&numRows, &ncon, xadj_g, adjncy_g, vtxwgt_g, NULL, NULL, &nparts, tpwgts, ubvec, options, &edgecut, partGlobal);
2985       PetscStackPop;
2986       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
2987       ierr = PetscFree(options);CHKERRQ(ierr);
2988       ierr = PetscFree(xadj_g);CHKERRQ(ierr);
2989       ierr = PetscFree(adjncy_g);CHKERRQ(ierr);
2990       ierr = PetscFree(vtxwgt_g);CHKERRQ(ierr);
2991     }
2992     ierr = PetscFree(numExclusivelyOwnedAll);CHKERRQ(ierr);
2993 
2994     /* Now scatter the parts array. */
2995     {
2996       PetscMPIInt *counts, *mpiCumSumVertices;
2997       ierr = PetscMalloc1(size, &counts);CHKERRQ(ierr);
2998       ierr = PetscMalloc1(size+1, &mpiCumSumVertices);CHKERRQ(ierr);
2999       for(i=0; i<size; i++) {
3000         ierr = PetscMPIIntCast(cumSumVertices[i+1] - cumSumVertices[i], &(counts[i]));CHKERRQ(ierr);
3001       }
3002       for(i=0; i<=size; i++) {
3003         ierr = PetscMPIIntCast(cumSumVertices[i], &(mpiCumSumVertices[i]));CHKERRQ(ierr);
3004       }
3005       ierr = MPI_Scatterv(partGlobal, counts, mpiCumSumVertices, MPIU_INT, part, counts[rank], MPIU_INT, 0, comm);CHKERRQ(ierr);
3006       ierr = PetscFree(counts);CHKERRQ(ierr);
3007       ierr = PetscFree(mpiCumSumVertices);CHKERRQ(ierr);
3008     }
3009 
3010     ierr = PetscFree(partGlobal);CHKERRQ(ierr);
3011     ierr = MatDestroy(&As);CHKERRQ(ierr);
3012   }
3013 
3014   ierr = MatDestroy(&A);CHKERRQ(ierr);
3015   ierr = PetscFree(ubvec);CHKERRQ(ierr);
3016   ierr = PetscFree(tpwgts);CHKERRQ(ierr);
3017 
3018   /* Now rename the result so that the vertex resembling the exclusively owned points stays on the same rank */
3019 
3020   ierr = PetscMalloc1(size, &firstVertices);CHKERRQ(ierr);
3021   ierr = PetscMalloc1(size, &renumbering);CHKERRQ(ierr);
3022   firstVertices[rank] = part[0];
3023   ierr = MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,firstVertices,1,MPIU_INT,comm);CHKERRQ(ierr);
3024   for (i=0; i<size; i++) {
3025     renumbering[firstVertices[i]] = i;
3026   }
3027   for (i=0; i<cumSumVertices[rank+1]-cumSumVertices[rank]; i++) {
3028     part[i] = renumbering[part[i]];
3029   }
3030   /* Check if the renumbering worked (this can fail when ParMETIS gives fewer partitions than there are processes) */
3031   failed = (PetscInt)(part[0] != rank);
3032   ierr = MPI_Allreduce(&failed, &failedGlobal, 1, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
3033 
3034   ierr = PetscFree(firstVertices);CHKERRQ(ierr);
3035   ierr = PetscFree(renumbering);CHKERRQ(ierr);
3036 
3037   if (failedGlobal > 0) {
3038     ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
3039     ierr = PetscFree(xadj);CHKERRQ(ierr);
3040     ierr = PetscFree(adjncy);CHKERRQ(ierr);
3041     ierr = PetscFree(vtxwgt);CHKERRQ(ierr);
3042     ierr = PetscFree(toBalance);CHKERRQ(ierr);
3043     ierr = PetscFree(isLeaf);CHKERRQ(ierr);
3044     ierr = PetscFree(isNonExclusivelyOwned);CHKERRQ(ierr);
3045     ierr = PetscFree(isExclusivelyOwned);CHKERRQ(ierr);
3046     ierr = PetscFree(part);CHKERRQ(ierr);
3047     if (viewer) {
3048       ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
3049       ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
3050     }
3051     ierr = PetscLogEventEnd(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
3052     PetscFunctionReturn(0);
3053   }
3054 
3055   /*Let's check how well we did distributing points*/
3056   if (viewer) {
3057     ierr = PetscViewerASCIIPrintf(viewer, "Comparing number of owned entities of depth %D on each process before rebalancing, after rebalancing, and after consistency checks.\n", entityDepth);CHKERRQ(ierr);
3058     ierr = PetscViewerASCIIPrintf(viewer, "Initial.     ");CHKERRQ(ierr);
3059     ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, NULL, viewer);CHKERRQ(ierr);
3060     ierr = PetscViewerASCIIPrintf(viewer, "Rebalanced.  ");CHKERRQ(ierr);
3061     ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, part, viewer);CHKERRQ(ierr);
3062   }
3063 
3064   /* Now check that every vertex is owned by a process that it is actually connected to. */
3065   for (i=1; i<=numNonExclusivelyOwned; i++) {
3066     PetscInt loc = 0;
3067     ierr = PetscFindInt(cumSumVertices[part[i]], xadj[i+1]-xadj[i], &adjncy[xadj[i]], &loc);CHKERRQ(ierr);
3068     /* If not, then just set the owner to the original owner (hopefully a rare event, it means that a vertex has been isolated) */
3069     if (loc<0) {
3070       part[i] = rank;
3071     }
3072   }
3073 
3074   /* Let's see how significant the influences of the previous fixing up step was.*/
3075   if (viewer) {
3076     ierr = PetscViewerASCIIPrintf(viewer, "After.       ");CHKERRQ(ierr);
3077     ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, part, viewer);CHKERRQ(ierr);
3078   }
3079 
3080   ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
3081   ierr = PetscFree(xadj);CHKERRQ(ierr);
3082   ierr = PetscFree(adjncy);CHKERRQ(ierr);
3083   ierr = PetscFree(vtxwgt);CHKERRQ(ierr);
3084 
3085   /* Almost done, now rewrite the SF to reflect the new ownership. */
3086   {
3087     PetscInt *pointsToRewrite;
3088     ierr = PetscMalloc1(numNonExclusivelyOwned, &pointsToRewrite);CHKERRQ(ierr);
3089     counter = 0;
3090     for(i=0; i<pEnd-pStart; i++) {
3091       if (toBalance[i]) {
3092         if (isNonExclusivelyOwned[i]) {
3093           pointsToRewrite[counter] = i + pStart;
3094           counter++;
3095         }
3096       }
3097     }
3098     ierr = DMPlexRewriteSF(dm, numNonExclusivelyOwned, pointsToRewrite, part+1, degrees);CHKERRQ(ierr);
3099     ierr = PetscFree(pointsToRewrite);CHKERRQ(ierr);
3100   }
3101 
3102   ierr = PetscFree(toBalance);CHKERRQ(ierr);
3103   ierr = PetscFree(isLeaf);CHKERRQ(ierr);
3104   ierr = PetscFree(isNonExclusivelyOwned);CHKERRQ(ierr);
3105   ierr = PetscFree(isExclusivelyOwned);CHKERRQ(ierr);
3106   ierr = PetscFree(part);CHKERRQ(ierr);
3107   if (viewer) {
3108     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
3109     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
3110   }
3111   if (success) *success = PETSC_TRUE;
3112   ierr = PetscLogEventEnd(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
3113 #else
3114   SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
3115 #endif
3116   PetscFunctionReturn(0);
3117 }
3118