xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 7eda4a2b655d50ced66764fb8992052c2fd6e7d7)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/hashseti.h>
3 
PetscClassId PETSCPARTITIONER_CLASSID = 0;

/* Registry mapping partitioner type names to their creation routines */
PetscFunctionList PetscPartitionerList              = NULL;
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;

/* One-shot flag so the Chaco citation below is only registered once */
PetscBool ChacoPartitionercite = PETSC_FALSE;
/* BibTeX entry for the Chaco partitioner; the booktitle line was missing its
   trailing newline, which broke the one-field-per-line layout of the entry. */
const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
                               "  author    = {Bruce Hendrickson and Robert Leland},\n"
                               "  title     = {A multilevel algorithm for partitioning graphs},\n"
                               "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},\n"
                               "  isbn      = {0-89791-816-9},\n"
                               "  pages     = {28},\n"
                               "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
                               "  publisher = {ACM Press},\n"
                               "  address   = {New York},\n"
                               "  year      = {1995}\n}\n";
20 
/* One-shot flag and BibTeX entry for the ParMetis partitioner citation */
PetscBool ParMetisPartitionercite = PETSC_FALSE;
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";
29 
30 PETSC_STATIC_INLINE PetscInt DMPlex_GlobalID(PetscInt point) { return point >= 0 ? point : -(point+1); }
31 
/*@C
  DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner

  Input Parameters:
+ dm      - The mesh DM
- height  - Height of the strata from which to construct the graph

  Output Parameters:
+ numVertices     - Number of vertices in the graph
. offsets         - Point offsets in the graph
. adjacency       - Point connectivity in the graph
- globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.

  Notes:
  The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
  representation on the mesh. The caller takes ownership of the returned offsets/adjacency arrays and the globalNumbering IS.

  Level: developer

.seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMSetAdjacency()
@*/
PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
{
  PetscInt       dim, depth, p, pStart, pEnd, a, adjSize, idx, size;
  PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
  IS             cellNumbering;
  const PetscInt *cellNum;
  PetscBool      useCone, useClosure;
  PetscSection   section;
  PetscSegBuffer adjBuffer;
  PetscSF        sfPoint;
  PetscInt       *adjCells = NULL, *remoteCells = NULL;
  const PetscInt *local;
  PetscInt       nroots, nleaves, l;
  PetscMPIInt    rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  if (dim != depth) {
    /* We do not handle the uninterpolated case here */
    ierr = DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);CHKERRQ(ierr);
    /* DMPlexCreateNeighborCSR does not make a numbering */
    if (globalNumbering) {ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);CHKERRQ(ierr);}
    /* Different behavior for empty graphs: still hand back a valid one-entry offset array */
    if (!*numVertices) {
      ierr = PetscMalloc1(1, offsets);CHKERRQ(ierr);
      (*offsets)[0] = 0;
    }
    /* Broken in parallel */
    if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported");
    PetscFunctionReturn(0);
  }
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
  /* Build adjacency graph via a section/segbuffer: the section counts edges per
     vertex, the segbuffer accumulates the edge targets in visit order */
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
  /* Always use FVM adjacency to create partitioner graph; the user's setting is restored below */
  ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
  ierr = DMSetBasicAdjacency(dm, PETSC_TRUE, PETSC_FALSE);CHKERRQ(ierr);
  ierr = DMPlexCreateNumbering_Internal(dm, pStart, pEnd, 0, NULL, sfPoint, &cellNumbering);CHKERRQ(ierr);
  if (globalNumbering) {
    /* Give the caller its own reference; we destroy ours at the end */
    ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
    *globalNumbering = cellNumbering;
  }
  ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  /* For all boundary faces (including faces adjacent to a ghost cell), record the local cell in adjCells
     Broadcast adjCells to remoteCells (to get cells from roots) and Reduce adjCells to remoteCells (to get cells from leaves)
   */
  ierr = PetscSFGetGraph(sfPoint, &nroots, &nleaves, &local, NULL);CHKERRQ(ierr);
  if (nroots >= 0) {
    PetscInt fStart, fEnd, f;

    ierr = PetscCalloc2(nroots, &adjCells, nroots, &remoteCells);CHKERRQ(ierr);
    ierr = DMPlexGetHeightStratum(dm, height+1, &fStart, &fEnd);CHKERRQ(ierr);
    /* -3 is a sentinel meaning "no cell recorded for this point"; only values >= 0 are used later */
    for (l = 0; l < nroots; ++l) adjCells[l] = -3;
    for (f = fStart; f < fEnd; ++f) {
      const PetscInt *support;
      PetscInt        supportSize;

      ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
      ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
      /* Boundary face: record its single cell. Interior face with a leaf cell:
         record the cell on the other side, so the owner learns its remote neighbor. */
      if (supportSize == 1) adjCells[f] = DMPlex_GlobalID(cellNum[support[0]]);
      else if (supportSize == 2) {
        ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = DMPlex_GlobalID(cellNum[support[1]]);
        ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = DMPlex_GlobalID(cellNum[support[0]]);
      }
      /* Handle non-conforming meshes */
      if (supportSize > 2) {
        PetscInt        numChildren, i;
        const PetscInt *children;

        ierr = DMPlexGetTreeChildren(dm, f, &numChildren, &children);CHKERRQ(ierr);
        for (i = 0; i < numChildren; ++i) {
          const PetscInt child = children[i];
          if (fStart <= child && child < fEnd) {
            ierr = DMPlexGetSupport(dm, child, &support);CHKERRQ(ierr);
            ierr = DMPlexGetSupportSize(dm, child, &supportSize);CHKERRQ(ierr);
            if (supportSize == 1) adjCells[child] = DMPlex_GlobalID(cellNum[support[0]]);
            else if (supportSize == 2) {
              ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr);
              if (p >= 0) adjCells[child] = DMPlex_GlobalID(cellNum[support[1]]);
              ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr);
              if (p >= 0) adjCells[child] = DMPlex_GlobalID(cellNum[support[0]]);
            }
          }
        }
      }
    }
    for (l = 0; l < nroots; ++l) remoteCells[l] = -1;
    /* Bcast pushes root values to leaves; the MPI_MAX reduce pulls leaf values back to roots,
       so afterwards remoteCells holds the neighboring cell from the other side of the SF */
    ierr = PetscSFBcastBegin(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
  }
  /* Combine local and global adjacencies */
  for (*numVertices = 0, p = pStart; p < pEnd; p++) {
    /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    /* Add remote cells */
    if (remoteCells) {
      const PetscInt gp = DMPlex_GlobalID(cellNum[p]);
      PetscInt       coneSize, numChildren, c, i;
      const PetscInt *cone, *children;

      ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
      ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
      for (c = 0; c < coneSize; ++c) {
        const PetscInt point = cone[c];
        /* Only record a remote neighbor that is a different cell than p itself */
        if (remoteCells[point] >= 0 && remoteCells[point] != gp) {
          PetscInt *PETSC_RESTRICT pBuf;
          ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
          ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
          *pBuf = remoteCells[point];
        }
        /* Handle non-conforming meshes */
        ierr = DMPlexGetTreeChildren(dm, point, &numChildren, &children);CHKERRQ(ierr);
        for (i = 0; i < numChildren; ++i) {
          const PetscInt child = children[i];
          if (remoteCells[child] >= 0 && remoteCells[child] != gp) {
            PetscInt *PETSC_RESTRICT pBuf;
            ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
            ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
            *pBuf = remoteCells[child];
          }
        }
      }
    }
    /* Add local cells */
    adjSize = PETSC_DETERMINE;
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      const PetscInt point = adj[a];
      if (point != p && pStart <= point && point < pEnd) {
        PetscInt *PETSC_RESTRICT pBuf;
        ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
        ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
        *pBuf = DMPlex_GlobalID(cellNum[point]);
      }
    }
    (*numVertices)++;
  }
  ierr = PetscFree(adj);CHKERRQ(ierr);
  ierr = PetscFree2(adjCells, remoteCells);CHKERRQ(ierr);
  /* Restore the adjacency definition the user had configured */
  ierr = DMSetBasicAdjacency(dm, useCone, useClosure);CHKERRQ(ierr);

  /* Derive CSR graph from section/segbuffer */
  ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
  for (idx = 0, p = pStart; p < pEnd; p++) {
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
  }
  vOffsets[*numVertices] = size;
  ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);

  if (nroots >= 0) {
    /* Filter out duplicate edges using section/segbuffer: rebuild the graph after
       sorting and deduplicating each vertex's edge list */
    ierr = PetscSectionReset(section);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(section, 0, *numVertices);CHKERRQ(ierr);
    for (p = 0; p < *numVertices; p++) {
      PetscInt start = vOffsets[p], end = vOffsets[p+1];
      PetscInt numEdges = end-start, *PETSC_RESTRICT edges;
      ierr = PetscSortRemoveDupsInt(&numEdges, &graph[start]);CHKERRQ(ierr);
      ierr = PetscSectionSetDof(section, p, numEdges);CHKERRQ(ierr);
      ierr = PetscSegBufferGetInts(adjBuffer, numEdges, &edges);CHKERRQ(ierr);
      ierr = PetscMemcpy(edges, &graph[start], numEdges*sizeof(*edges));CHKERRQ(ierr);
    }
    ierr = PetscFree(vOffsets);CHKERRQ(ierr);
    ierr = PetscFree(graph);CHKERRQ(ierr);
    /* Derive CSR graph from section/segbuffer */
    ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
    ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
    ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
    for (idx = 0, p = 0; p < *numVertices; p++) {
      ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
    }
    vOffsets[*numVertices] = size;
    ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
  } else {
    /* Sort adjacencies (not strictly necessary) */
    for (p = 0; p < *numVertices; p++) {
      PetscInt start = vOffsets[p], end = vOffsets[p+1];
      ierr = PetscSortInt(end-start, &graph[start]);CHKERRQ(ierr);
    }
  }

  /* The caller takes ownership of vOffsets and graph */
  if (offsets) *offsets = vOffsets;
  if (adjacency) *adjacency = graph;

  /* Cleanup */
  ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
  ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
255 
256 /*@C
257   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
258 
259   Collective
260 
261   Input Arguments:
262 + dm - The DMPlex
263 - cellHeight - The height of mesh points to treat as cells (default should be 0)
264 
265   Output Arguments:
266 + numVertices - The number of local vertices in the graph, or cells in the mesh.
267 . offsets     - The offset to the adjacency list for each cell
268 - adjacency   - The adjacency list for all cells
269 
270   Note: This is suitable for input to a mesh partitioner like ParMetis.
271 
272   Level: advanced
273 
274 .seealso: DMPlexCreate()
275 @*/
276 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
277 {
278   const PetscInt maxFaceCases = 30;
279   PetscInt       numFaceCases = 0;
280   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
281   PetscInt      *off, *adj;
282   PetscInt      *neighborCells = NULL;
283   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
284   PetscErrorCode ierr;
285 
286   PetscFunctionBegin;
287   /* For parallel partitioning, I think you have to communicate supports */
288   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
289   cellDim = dim - cellHeight;
290   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
291   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
292   if (cEnd - cStart == 0) {
293     if (numVertices) *numVertices = 0;
294     if (offsets)   *offsets   = NULL;
295     if (adjacency) *adjacency = NULL;
296     PetscFunctionReturn(0);
297   }
298   numCells  = cEnd - cStart;
299   faceDepth = depth - cellHeight;
300   if (dim == depth) {
301     PetscInt f, fStart, fEnd;
302 
303     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
304     /* Count neighboring cells */
305     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
306     for (f = fStart; f < fEnd; ++f) {
307       const PetscInt *support;
308       PetscInt        supportSize;
309       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
310       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
311       if (supportSize == 2) {
312         PetscInt numChildren;
313 
314         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
315         if (!numChildren) {
316           ++off[support[0]-cStart+1];
317           ++off[support[1]-cStart+1];
318         }
319       }
320     }
321     /* Prefix sum */
322     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
323     if (adjacency) {
324       PetscInt *tmp;
325 
326       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
327       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
328       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
329       /* Get neighboring cells */
330       for (f = fStart; f < fEnd; ++f) {
331         const PetscInt *support;
332         PetscInt        supportSize;
333         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
334         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
335         if (supportSize == 2) {
336           PetscInt numChildren;
337 
338           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
339           if (!numChildren) {
340             adj[tmp[support[0]-cStart]++] = support[1];
341             adj[tmp[support[1]-cStart]++] = support[0];
342           }
343         }
344       }
345 #if defined(PETSC_USE_DEBUG)
346       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
347 #endif
348       ierr = PetscFree(tmp);CHKERRQ(ierr);
349     }
350     if (numVertices) *numVertices = numCells;
351     if (offsets)   *offsets   = off;
352     if (adjacency) *adjacency = adj;
353     PetscFunctionReturn(0);
354   }
355   /* Setup face recognition */
356   if (faceDepth == 1) {
357     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
358 
359     for (c = cStart; c < cEnd; ++c) {
360       PetscInt corners;
361 
362       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
363       if (!cornersSeen[corners]) {
364         PetscInt nFV;
365 
366         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
367         cornersSeen[corners] = 1;
368 
369         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
370 
371         numFaceVertices[numFaceCases++] = nFV;
372       }
373     }
374   }
375   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
376   /* Count neighboring cells */
377   for (cell = cStart; cell < cEnd; ++cell) {
378     PetscInt numNeighbors = PETSC_DETERMINE, n;
379 
380     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
381     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
382     for (n = 0; n < numNeighbors; ++n) {
383       PetscInt        cellPair[2];
384       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
385       PetscInt        meetSize = 0;
386       const PetscInt *meet    = NULL;
387 
388       cellPair[0] = cell; cellPair[1] = neighborCells[n];
389       if (cellPair[0] == cellPair[1]) continue;
390       if (!found) {
391         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
392         if (meetSize) {
393           PetscInt f;
394 
395           for (f = 0; f < numFaceCases; ++f) {
396             if (numFaceVertices[f] == meetSize) {
397               found = PETSC_TRUE;
398               break;
399             }
400           }
401         }
402         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
403       }
404       if (found) ++off[cell-cStart+1];
405     }
406   }
407   /* Prefix sum */
408   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
409 
410   if (adjacency) {
411     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
412     /* Get neighboring cells */
413     for (cell = cStart; cell < cEnd; ++cell) {
414       PetscInt numNeighbors = PETSC_DETERMINE, n;
415       PetscInt cellOffset   = 0;
416 
417       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
418       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
419       for (n = 0; n < numNeighbors; ++n) {
420         PetscInt        cellPair[2];
421         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
422         PetscInt        meetSize = 0;
423         const PetscInt *meet    = NULL;
424 
425         cellPair[0] = cell; cellPair[1] = neighborCells[n];
426         if (cellPair[0] == cellPair[1]) continue;
427         if (!found) {
428           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
429           if (meetSize) {
430             PetscInt f;
431 
432             for (f = 0; f < numFaceCases; ++f) {
433               if (numFaceVertices[f] == meetSize) {
434                 found = PETSC_TRUE;
435                 break;
436               }
437             }
438           }
439           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
440         }
441         if (found) {
442           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
443           ++cellOffset;
444         }
445       }
446     }
447   }
448   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
449   if (numVertices) *numVertices = numCells;
450   if (offsets)   *offsets   = off;
451   if (adjacency) *adjacency = adj;
452   PetscFunctionReturn(0);
453 }
454 
/*@C
  PetscPartitionerRegister - Adds a new PetscPartitioner implementation

  Not Collective

  Input Parameters:
+ name        - The name of a new user-defined creation routine
- create_func - The creation routine itself

  Notes:
  PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners

  Sample usage:
.vb
    PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
.ve

  Then, your PetscPartitioner type can be chosen with the procedural interface via
.vb
    PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
    PetscPartitionerSetType(PetscPartitioner, "my_part");
.ve
   or at runtime via the option
.vb
    -petscpartitioner_type my_part
.ve

  Level: advanced

.keywords: PetscPartitioner, register
.seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()

@*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Record the (name, creation routine) pair in the global registry consulted by PetscPartitionerSetType() */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
496 
497 /*@C
498   PetscPartitionerSetType - Builds a particular PetscPartitioner
499 
500   Collective on PetscPartitioner
501 
502   Input Parameters:
503 + part - The PetscPartitioner object
504 - name - The kind of partitioner
505 
506   Options Database Key:
507 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
508 
509   Level: intermediate
510 
511 .keywords: PetscPartitioner, set, type
512 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
513 @*/
514 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
515 {
516   PetscErrorCode (*r)(PetscPartitioner);
517   PetscBool      match;
518   PetscErrorCode ierr;
519 
520   PetscFunctionBegin;
521   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
522   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
523   if (match) PetscFunctionReturn(0);
524 
525   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
526   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
527   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
528 
529   if (part->ops->destroy) {
530     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
531   }
532   ierr = PetscMemzero(part->ops, sizeof(struct _PetscPartitionerOps));CHKERRQ(ierr);
533   ierr = (*r)(part);CHKERRQ(ierr);
534   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
535   PetscFunctionReturn(0);
536 }
537 
/*@C
  PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.

  Not Collective

  Input Parameter:
. part - The PetscPartitioner

  Output Parameter:
. name - The PetscPartitioner type name (borrowed; do not free)

  Level: intermediate

.keywords: PetscPartitioner, get, type, name
.seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  /* Ensure the registry is populated so the returned name corresponds to a registered type */
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
565 
/*@C
  PetscPartitionerView - Views a PetscPartitioner

  Collective on PetscPartitioner

  Input Parameters:
+ part - the PetscPartitioner object to view
- v    - the viewer

  Level: developer

.seealso: PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
{
  PetscMPIInt    size;
  PetscBool      isascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Default to stdout on the partitioner's communicator when no viewer is given */
  if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
  ierr = PetscObjectTypeCompare((PetscObject) v, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
  if (isascii) {
    ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v, "Graph Partitioner: %d MPI Process%s\n", size, size > 1 ? "es" : "");CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v, "  type: %s\n", part->hdr.type_name);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPushTab(v);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v, "edge cut: %D\n", part->edgeCut);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v, "balance:  %.2g\n", part->balance);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPopTab(v);CHKERRQ(ierr);
  }
  /* Let the implementation append any type-specific output */
  if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
601 
602 static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType)
603 {
604   PetscFunctionBegin;
605   if (!currentType) {
606 #if defined(PETSC_HAVE_CHACO)
607     *defaultType = PETSCPARTITIONERCHACO;
608 #elif defined(PETSC_HAVE_PARMETIS)
609     *defaultType = PETSCPARTITIONERPARMETIS;
610 #elif defined(PETSC_HAVE_PTSCOTCH)
611     *defaultType = PETSCPARTITIONERPTSCOTCH;
612 #else
613     *defaultType = PETSCPARTITIONERSIMPLE;
614 #endif
615   } else {
616     *defaultType = currentType;
617   }
618   PetscFunctionReturn(0);
619 }
620 
/*@
  PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to set options for

  Level: developer

.seealso: PetscPartitionerView()
@*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  const char    *defaultType;
  char           name[256];
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  ierr = PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);CHKERRQ(ierr);
  /* NOTE: PetscObjectOptionsBegin declares PetscOptionsObject used below */
  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, sizeof(name), &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
  } else if (!((PetscObject) part)->type_name) {
    /* No type chosen on the command line and none set yet: install the default */
    ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
  }
  /* Give the implementation a chance to process its own options */
  if (part->ops->setfromoptions) {
    ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);
  }
  /* Replace any previous graph viewer with the one requested now */
  ierr = PetscViewerDestroy(&part->viewerGraph);CHKERRQ(ierr);
  ierr = PetscOptionsGetViewer(((PetscObject) part)->comm, ((PetscObject) part)->options, ((PetscObject) part)->prefix, "-petscpartitioner_view_graph", &part->viewerGraph, &part->formatGraph, &part->viewGraph);CHKERRQ(ierr);
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
661 
662 /*@C
663   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
664 
665   Collective on PetscPartitioner
666 
667   Input Parameter:
668 . part - the PetscPartitioner object to setup
669 
670   Level: developer
671 
672 .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
673 @*/
674 PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
675 {
676   PetscErrorCode ierr;
677 
678   PetscFunctionBegin;
679   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
680   if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
681   PetscFunctionReturn(0);
682 }
683 
/*@
  PetscPartitionerDestroy - Destroys a PetscPartitioner object

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to destroy (set to NULL on return)

  Level: developer

.seealso: PetscPartitionerView()
@*/
PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*part) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);

  /* Only the last reference actually frees the object */
  if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
  ((PetscObject) (*part))->refct = 0;

  ierr = PetscViewerDestroy(&(*part)->viewerGraph);CHKERRQ(ierr);
  /* Type-specific teardown before the common header is destroyed */
  if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
  ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
712 
713 /*@
714   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
715 
716   Collective on MPI_Comm
717 
718   Input Parameter:
719 . comm - The communicator for the PetscPartitioner object
720 
721   Output Parameter:
722 . part - The PetscPartitioner object
723 
724   Level: beginner
725 
726 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
727 @*/
728 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
729 {
730   PetscPartitioner p;
731   const char       *partitionerType = NULL;
732   PetscErrorCode   ierr;
733 
734   PetscFunctionBegin;
735   PetscValidPointer(part, 2);
736   *part = NULL;
737   ierr = DMInitializePackage();CHKERRQ(ierr);
738 
739   ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
740   ierr = PetscPartitionerGetDefaultType(NULL,&partitionerType);CHKERRQ(ierr);
741   ierr = PetscPartitionerSetType(p,partitionerType);CHKERRQ(ierr);
742 
743   p->edgeCut = 0;
744   p->balance = 0.0;
745 
746   *part = p;
747   PetscFunctionReturn(0);
748 }
749 
750 /*@
751   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
752 
753   Collective on DM
754 
755   Input Parameters:
756 + part    - The PetscPartitioner
757 - dm      - The mesh DM
758 
759   Output Parameters:
760 + partSection     - The PetscSection giving the division of points by partition
761 - partition       - The list of points by partition
762 
763   Options Database:
764 . -petscpartitioner_view_graph - View the graph we are partitioning
765 
766   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
767 
768   Level: developer
769 
770 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
771 @*/
772 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
773 {
774   PetscMPIInt    size;
775   PetscErrorCode ierr;
776 
777   PetscFunctionBegin;
778   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
779   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
780   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
781   PetscValidPointer(partition, 5);
782   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
783   if (size == 1) {
784     PetscInt *points;
785     PetscInt  cStart, cEnd, c;
786 
787     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
788     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
789     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
790     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
791     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
792     for (c = cStart; c < cEnd; ++c) points[c] = c;
793     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
794     PetscFunctionReturn(0);
795   }
796   if (part->height == 0) {
797     PetscInt numVertices;
798     PetscInt *start     = NULL;
799     PetscInt *adjacency = NULL;
800     IS       globalNumbering;
801 
802     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
803     if (part->viewGraph) {
804       PetscViewer viewer = part->viewerGraph;
805       PetscBool   isascii;
806       PetscInt    v, i;
807       PetscMPIInt rank;
808 
809       ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) viewer), &rank);CHKERRQ(ierr);
810       ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
811       if (isascii) {
812         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
813         ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]Nv: %D\n", rank, numVertices);CHKERRQ(ierr);
814         for (v = 0; v < numVertices; ++v) {
815           const PetscInt s = start[v];
816           const PetscInt e = start[v+1];
817 
818           ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]  ", rank);CHKERRQ(ierr);
819           for (i = s; i < e; ++i) {ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%D ", adjacency[i]);CHKERRQ(ierr);}
820           ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%D-%D)\n", s, e);CHKERRQ(ierr);
821         }
822         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
823         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
824       }
825     }
826     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
827     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
828     ierr = PetscFree(start);CHKERRQ(ierr);
829     ierr = PetscFree(adjacency);CHKERRQ(ierr);
830     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
831       const PetscInt *globalNum;
832       const PetscInt *partIdx;
833       PetscInt *map, cStart, cEnd;
834       PetscInt *adjusted, i, localSize, offset;
835       IS    newPartition;
836 
837       ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
838       ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
839       ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
840       ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
841       ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
842       ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
843       for (i = cStart, offset = 0; i < cEnd; i++) {
844         if (globalNum[i - cStart] >= 0) map[offset++] = i;
845       }
846       for (i = 0; i < localSize; i++) {
847         adjusted[i] = map[partIdx[i]];
848       }
849       ierr = PetscFree(map);CHKERRQ(ierr);
850       ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
851       ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
852       ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
853       ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
854       ierr = ISDestroy(partition);CHKERRQ(ierr);
855       *partition = newPartition;
856     }
857   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
858   ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
859   PetscFunctionReturn(0);
860 }
861 
862 static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
863 {
864   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
865   PetscErrorCode          ierr;
866 
867   PetscFunctionBegin;
868   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
869   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
870   ierr = PetscFree(p);CHKERRQ(ierr);
871   PetscFunctionReturn(0);
872 }
873 
874 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
875 {
876   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
877   PetscErrorCode          ierr;
878 
879   PetscFunctionBegin;
880   if (p->random) {
881     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
882     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
883     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
884   }
885   PetscFunctionReturn(0);
886 }
887 
888 static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
889 {
890   PetscBool      iascii;
891   PetscErrorCode ierr;
892 
893   PetscFunctionBegin;
894   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
895   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
896   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
897   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
898   PetscFunctionReturn(0);
899 }
900 
901 static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
902 {
903   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
904   PetscErrorCode          ierr;
905 
906   PetscFunctionBegin;
907   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner Shell Options");CHKERRQ(ierr);
908   ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
909   ierr = PetscOptionsTail();CHKERRQ(ierr);
910   PetscFunctionReturn(0);
911 }
912 
913 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
914 {
915   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
916   PetscInt                np;
917   PetscErrorCode          ierr;
918 
919   PetscFunctionBegin;
920   if (p->random) {
921     PetscRandom r;
922     PetscInt   *sizes, *points, v, p;
923     PetscMPIInt rank;
924 
925     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
926     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
927     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr);
928     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
929     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
930     for (v = 0; v < numVertices; ++v) {points[v] = v;}
931     for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);}
932     for (v = numVertices-1; v > 0; --v) {
933       PetscReal val;
934       PetscInt  w, tmp;
935 
936       ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));CHKERRQ(ierr);
937       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
938       w    = PetscFloorReal(val);
939       tmp       = points[v];
940       points[v] = points[w];
941       points[w] = tmp;
942     }
943     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
944     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
945     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
946   }
947   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
948   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
949   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
950   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
951   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
952   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
953   *partition = p->partition;
954   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
955   PetscFunctionReturn(0);
956 }
957 
/* Install the shell implementation's function table */
static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view           = PetscPartitionerView_Shell;
  part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
  part->ops->destroy        = PetscPartitionerDestroy_Shell;
  part->ops->partition      = PetscPartitionerPartition_Shell;
  PetscFunctionReturn(0);
}
967 
968 /*MC
969   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
970 
971   Level: intermediate
972 
973 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
974 M*/
975 
976 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
977 {
978   PetscPartitioner_Shell *p;
979   PetscErrorCode          ierr;
980 
981   PetscFunctionBegin;
982   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
983   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
984   part->data = p;
985 
986   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
987   p->random = PETSC_FALSE;
988   PetscFunctionReturn(0);
989 }
990 
991 /*@C
992   PetscPartitionerShellSetPartition - Set an artifical partition for a mesh
993 
994   Collective on Part
995 
996   Input Parameters:
997 + part     - The PetscPartitioner
998 . size - The number of partitions
999 . sizes    - array of size size (or NULL) providing the number of points in each partition
1000 - points   - array of size sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)
1001 
1002   Level: developer
1003 
1004   Notes:
1005 
1006     It is safe to free the sizes and points arrays after use in this routine.
1007 
1008 .seealso DMPlexDistribute(), PetscPartitionerCreate()
1009 @*/
1010 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
1011 {
1012   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1013   PetscInt                proc, numPoints;
1014   PetscErrorCode          ierr;
1015 
1016   PetscFunctionBegin;
1017   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1018   if (sizes)  {PetscValidPointer(sizes, 3);}
1019   if (points) {PetscValidPointer(points, 4);}
1020   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
1021   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
1022   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
1023   ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
1024   if (sizes) {
1025     for (proc = 0; proc < size; ++proc) {
1026       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
1027     }
1028   }
1029   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
1030   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
1031   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
1032   PetscFunctionReturn(0);
1033 }
1034 
1035 /*@
1036   PetscPartitionerShellSetRandom - Set the flag to use a random partition
1037 
1038   Collective on Part
1039 
1040   Input Parameters:
1041 + part   - The PetscPartitioner
1042 - random - The flag to use a random partition
1043 
1044   Level: intermediate
1045 
1046 .seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
1047 @*/
1048 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
1049 {
1050   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1051 
1052   PetscFunctionBegin;
1053   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1054   p->random = random;
1055   PetscFunctionReturn(0);
1056 }
1057 
1058 /*@
1059   PetscPartitionerShellGetRandom - get the flag to use a random partition
1060 
1061   Collective on Part
1062 
1063   Input Parameter:
1064 . part   - The PetscPartitioner
1065 
1066   Output Parameter
1067 . random - The flag to use a random partition
1068 
1069   Level: intermediate
1070 
1071 .seealso PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
1072 @*/
1073 PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
1074 {
1075   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1076 
1077   PetscFunctionBegin;
1078   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1079   PetscValidPointer(random, 2);
1080   *random = p->random;
1081   PetscFunctionReturn(0);
1082 }
1083 
1084 static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
1085 {
1086   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
1087   PetscErrorCode          ierr;
1088 
1089   PetscFunctionBegin;
1090   ierr = PetscFree(p);CHKERRQ(ierr);
1091   PetscFunctionReturn(0);
1092 }
1093 
/* The simple partitioner has no state to report; ASCII view is a no-op */
static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1099 
1100 static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
1101 {
1102   PetscBool      iascii;
1103   PetscErrorCode ierr;
1104 
1105   PetscFunctionBegin;
1106   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1107   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1108   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1109   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
1110   PetscFunctionReturn(0);
1111 }
1112 
/* Assign contiguous runs of globally numbered vertices to each partition.
   Vertices keep their local order (the partition IS is a stride), and each
   rank computes how many of its local vertices fall into each global chunk. */
static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)part);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial: spread the remainder over the first numVertices % nparts parts */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  } else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Exclusive prefix sum of local vertex counts gives each rank's global range */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      PetscInt rem = nvGlobal % nparts;
      /* The first 'rem' partitions hold pBig vertices, the rest pSmall.
         NOTE(review): pSmall is 0 when nvGlobal < nparts, which would make the
         '/ pSmall' branches below divide by zero -- confirm callers guarantee
         nvGlobal >= nparts. */
      PetscInt pSmall = nvGlobal/nparts;
      PetscInt pBig = nvGlobal/nparts + 1;


      if (rem) {
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        /* Locate the partition containing the first/last local vertex, accounting
           for the boundary between large (pBig) and small (pSmall) partitions */
        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        } else {
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        } else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      } else {
        /* All partitions have the same size */
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      /* Record, for each partition overlapping this rank's range, how many of
         this rank's vertices it receives */
      for (np = firstPart; np <= lastPart; np++) {
        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1182 
/* Install the simple implementation's function table */
static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view      = PetscPartitionerView_Simple;
  part->ops->destroy   = PetscPartitionerDestroy_Simple;
  part->ops->partition = PetscPartitionerPartition_Simple;
  PetscFunctionReturn(0);
}
1191 
1192 /*MC
1193   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1194 
1195   Level: intermediate
1196 
1197 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1198 M*/
1199 
1200 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1201 {
1202   PetscPartitioner_Simple *p;
1203   PetscErrorCode           ierr;
1204 
1205   PetscFunctionBegin;
1206   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1207   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1208   part->data = p;
1209 
1210   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1211   PetscFunctionReturn(0);
1212 }
1213 
1214 static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1215 {
1216   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1217   PetscErrorCode          ierr;
1218 
1219   PetscFunctionBegin;
1220   ierr = PetscFree(p);CHKERRQ(ierr);
1221   PetscFunctionReturn(0);
1222 }
1223 
/* The gather partitioner has no state to report; ASCII view is a no-op */
static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1229 
1230 static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1231 {
1232   PetscBool      iascii;
1233   PetscErrorCode ierr;
1234 
1235   PetscFunctionBegin;
1236   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1237   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1238   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1239   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1240   PetscFunctionReturn(0);
1241 }
1242 
1243 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1244 {
1245   PetscInt       np;
1246   PetscErrorCode ierr;
1247 
1248   PetscFunctionBegin;
1249   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1250   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1251   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1252   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1253   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1254   PetscFunctionReturn(0);
1255 }
1256 
/* Install the gather implementation's function table */
static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view      = PetscPartitionerView_Gather;
  part->ops->destroy   = PetscPartitionerDestroy_Gather;
  part->ops->partition = PetscPartitionerPartition_Gather;
  PetscFunctionReturn(0);
}
1265 
1266 /*MC
1267   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1268 
1269   Level: intermediate
1270 
1271 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1272 M*/
1273 
1274 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1275 {
1276   PetscPartitioner_Gather *p;
1277   PetscErrorCode           ierr;
1278 
1279   PetscFunctionBegin;
1280   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1281   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1282   part->data = p;
1283 
1284   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1285   PetscFunctionReturn(0);
1286 }
1287 
1288 
1289 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1290 {
1291   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1292   PetscErrorCode          ierr;
1293 
1294   PetscFunctionBegin;
1295   ierr = PetscFree(p);CHKERRQ(ierr);
1296   PetscFunctionReturn(0);
1297 }
1298 
/* The Chaco partitioner has no state to report; ASCII view is a no-op */
static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1304 
1305 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1306 {
1307   PetscBool      iascii;
1308   PetscErrorCode ierr;
1309 
1310   PetscFunctionBegin;
1311   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1312   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1313   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1314   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1315   PetscFunctionReturn(0);
1316 }
1317 
1318 #if defined(PETSC_HAVE_CHACO)
1319 #if defined(PETSC_HAVE_UNISTD_H)
1320 #include <unistd.h>
1321 #endif
1322 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1323 #include <chaco.h>
1324 #else
1325 /* Older versions of Chaco do not have an include file */
1326 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1327                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1328                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1329                        int mesh_dims[3], double *goal, int global_method, int local_method,
1330                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1331 #endif
1332 extern int FREE_GRAPH;
1333 #endif
1334 
/* Partition the local graph with the Chaco library.
   Chaco is serial: all vertices must live on one rank (checked in debug builds).
   Chaco writes diagnostics to stdout, so stdout is temporarily redirected into
   a pipe and the captured text is reported on error. */
static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int           *assignment;              /* Output partition */
#else
  short int     *assignment;              /* Output partition */
#endif
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
#if defined (PETSC_USE_DEBUG)
  {
    int ival,isum;
    PetscBool distributed;

    /* The graph is distributed iff more than one rank holds vertices */
    ival = (numVertices > 0);
    ierr = MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);CHKERRQ(ierr);
    distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
    if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
  }
#endif
  if (!numVertices) {
    /* Nothing local to partition: emit an empty section and index set */
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco expects 1-based adjacency; shift in place and undo at the end */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    /* Restore stdout and capture whatever Chaco printed, for error reporting */
    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#else
  if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  /* Group vertex numbers by their assigned partition, in partition order */
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift applied above */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1455 
/* Install the Chaco implementation's function table */
static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view      = PetscPartitionerView_Chaco;
  part->ops->destroy   = PetscPartitionerDestroy_Chaco;
  part->ops->partition = PetscPartitionerPartition_Chaco;
  PetscFunctionReturn(0);
}
1464 
1465 /*MC
1466   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1467 
1468   Level: intermediate
1469 
1470 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1471 M*/
1472 
1473 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1474 {
1475   PetscPartitioner_Chaco *p;
1476   PetscErrorCode          ierr;
1477 
1478   PetscFunctionBegin;
1479   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1480   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1481   part->data = p;
1482 
1483   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1484   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1485   PetscFunctionReturn(0);
1486 }
1487 
/* ParMetis partitioning schemes selectable via -petscpartitioner_parmetis_type */
static const char *ptypes[] = {"kway", "rb"};
1489 
1490 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1491 {
1492   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1493   PetscErrorCode             ierr;
1494 
1495   PetscFunctionBegin;
1496   ierr = PetscFree(p);CHKERRQ(ierr);
1497   PetscFunctionReturn(0);
1498 }
1499 
1500 static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1501 {
1502   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1503   PetscErrorCode             ierr;
1504 
1505   PetscFunctionBegin;
1506   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1507   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis type: %s\n", ptypes[p->ptype]);CHKERRQ(ierr);
1508   ierr = PetscViewerASCIIPrintf(viewer, "load imbalance ratio %g\n", (double) p->imbalanceRatio);CHKERRQ(ierr);
1509   ierr = PetscViewerASCIIPrintf(viewer, "debug flag %D\n", p->debugFlag);CHKERRQ(ierr);
1510   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1511   PetscFunctionReturn(0);
1512 }
1513 
1514 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1515 {
1516   PetscBool      iascii;
1517   PetscErrorCode ierr;
1518 
1519   PetscFunctionBegin;
1520   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1521   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1522   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1523   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1524   PetscFunctionReturn(0);
1525 }
1526 
1527 static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1528 {
1529   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1530   PetscInt                  p_randomSeed = -1; /* TODO: Add this to PetscPartitioner_ParMetis */
1531   PetscErrorCode            ierr;
1532 
1533   PetscFunctionBegin;
1534   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");CHKERRQ(ierr);
1535   ierr = PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);CHKERRQ(ierr);
1536   ierr = PetscOptionsReal("-petscpartitioner_parmetis_imbalance_ratio", "Load imbalance ratio limit", "", p->imbalanceRatio, &p->imbalanceRatio, NULL);CHKERRQ(ierr);
1537   ierr = PetscOptionsInt("-petscpartitioner_parmetis_debug", "Debugging flag", "", p->debugFlag, &p->debugFlag, NULL);CHKERRQ(ierr);
1538   ierr = PetscOptionsInt("-petscpartitioner_parmetis_seed", "Random seed", "", p_randomSeed, &p_randomSeed, NULL);CHKERRQ(ierr);
1539   ierr = PetscOptionsTail();CHKERRQ(ierr);
1540   PetscFunctionReturn(0);
1541 }
1542 
1543 #if defined(PETSC_HAVE_PARMETIS)
1544 #include <parmetis.h>
1545 #endif
1546 
1547 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1548 {
1549 #if defined(PETSC_HAVE_PARMETIS)
1550   PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *) part->data;
1551   MPI_Comm       comm;
1552   PetscSection   section;
1553   PetscInt       nvtxs       = numVertices; /* The number of vertices in full graph */
1554   PetscInt      *vtxdist;                   /* Distribution of vertices across processes */
1555   PetscInt      *xadj        = start;       /* Start of edge list for each vertex */
1556   PetscInt      *adjncy      = adjacency;   /* Edge lists for all vertices */
1557   PetscInt      *vwgt        = NULL;        /* Vertex weights */
1558   PetscInt      *adjwgt      = NULL;        /* Edge weights */
1559   PetscInt       wgtflag     = 0;           /* Indicates which weights are present */
1560   PetscInt       numflag     = 0;           /* Indicates initial offset (0 or 1) */
1561   PetscInt       ncon        = 1;           /* The number of weights per vertex */
1562   PetscInt       metis_ptype = pm->ptype;   /* kway or recursive bisection */
1563   real_t        *tpwgts;                    /* The fraction of vertex weights assigned to each partition */
1564   real_t        *ubvec;                     /* The balance intolerance for vertex weights */
1565   PetscInt       options[64];               /* Options */
1566   /* Outputs */
1567   PetscInt       v, i, *assignment, *points;
1568   PetscMPIInt    size, rank, p;
1569   PetscInt       pm_randomSeed = -1;
1570   PetscErrorCode ierr;
1571 
1572   PetscFunctionBegin;
1573   ierr = PetscOptionsGetInt(((PetscObject)part)->options,((PetscObject)part)->prefix,"-petscpartitioner_parmetis_seed", &pm_randomSeed, NULL);CHKERRQ(ierr);
1574   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1575   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1576   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1577   /* Calculate vertex distribution */
1578   ierr = PetscMalloc5(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
1579   vtxdist[0] = 0;
1580   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1581   for (p = 2; p <= size; ++p) {
1582     vtxdist[p] += vtxdist[p-1];
1583   }
1584   /* Calculate partition weights */
1585   for (p = 0; p < nparts; ++p) {
1586     tpwgts[p] = 1.0/nparts;
1587   }
1588   ubvec[0] = pm->imbalanceRatio;
1589   /* Weight cells by dofs on cell by default */
1590   ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
1591   if (section) {
1592     PetscInt cStart, cEnd, dof;
1593 
1594     ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
1595     for (v = cStart; v < cEnd; ++v) {
1596       ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1597       /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1598       /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1599       if (v-cStart < numVertices) vwgt[v-cStart] = PetscMax(dof, 1);
1600     }
1601   } else {
1602     for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1603   }
1604   wgtflag |= 2; /* have weights on graph vertices */
1605 
1606   if (nparts == 1) {
1607     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1608   } else {
1609     for (p = 0; !vtxdist[p+1] && p < size; ++p);
1610     if (vtxdist[p+1] == vtxdist[size]) {
1611       if (rank == p) {
1612         ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
1613         options[METIS_OPTION_DBGLVL] = pm->debugFlag;
1614         options[METIS_OPTION_SEED]   = pm_randomSeed;
1615         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
1616         if (metis_ptype == 1) {
1617           PetscStackPush("METIS_PartGraphRecursive");
1618           ierr = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1619           PetscStackPop;
1620           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
1621         } else {
1622           /*
1623            It would be nice to activate the two options below, but they would need some actual testing.
1624            - Turning on these options may exercise path of the METIS code that have bugs and may break production runs.
1625            - If CONTIG is set to 1, METIS will exit with error if the graph is disconnected, despite the manual saying the option is ignored in such case.
1626           */
1627           /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
1628           /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
1629           PetscStackPush("METIS_PartGraphKway");
1630           ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1631           PetscStackPop;
1632           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1633         }
1634       }
1635     } else {
1636       options[0] = 1; /*use options */
1637       options[1] = pm->debugFlag;
1638       options[2] = (pm_randomSeed == -1) ? 15 : pm_randomSeed; /* default is GLOBAL_SEED=15 from `libparmetis/defs.h` */
1639       PetscStackPush("ParMETIS_V3_PartKway");
1640       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &comm);
1641       PetscStackPop;
1642       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
1643     }
1644   }
1645   /* Convert to PetscSection+IS */
1646   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1647   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1648   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1649   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1650   for (p = 0, i = 0; p < nparts; ++p) {
1651     for (v = 0; v < nvtxs; ++v) {
1652       if (assignment[v] == p) points[i++] = v;
1653     }
1654   }
1655   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1656   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1657   ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
1658   PetscFunctionReturn(0);
1659 #else
1660   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1661 #endif
1662 }
1663 
1664 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1665 {
1666   PetscFunctionBegin;
1667   part->ops->view           = PetscPartitionerView_ParMetis;
1668   part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
1669   part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
1670   part->ops->partition      = PetscPartitionerPartition_ParMetis;
1671   PetscFunctionReturn(0);
1672 }
1673 
1674 /*MC
1675   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1676 
1677   Level: intermediate
1678 
1679 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1680 M*/
1681 
1682 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1683 {
1684   PetscPartitioner_ParMetis *p;
1685   PetscErrorCode          ierr;
1686 
1687   PetscFunctionBegin;
1688   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1689   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1690   part->data = p;
1691 
1692   p->ptype          = 0;
1693   p->imbalanceRatio = 1.05;
1694   p->debugFlag      = 0;
1695 
1696   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1697   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1698   PetscFunctionReturn(0);
1699 }
1700 
1701 
1702 PetscBool PTScotchPartitionercite = PETSC_FALSE;
1703 const char PTScotchPartitionerCitation[] =
1704   "@article{PTSCOTCH,\n"
1705   "  author  = {C. Chevalier and F. Pellegrini},\n"
1706   "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
1707   "  journal = {Parallel Computing},\n"
1708   "  volume  = {34},\n"
1709   "  number  = {6},\n"
1710   "  pages   = {318--331},\n"
1711   "  year    = {2008},\n"
1712   "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
1713   "}\n";
1714 
/* Private context for the PT-Scotch partitioner */
typedef struct {
  PetscInt  strategy;  /* Index into PTScotchStrategyList[]; mapped to a SCOTCH_STRAT* flag by PTScotch_Strategy() */
  PetscReal imbalance; /* Load imbalance tolerance passed to PT-Scotch */
} PetscPartitioner_PTScotch;
1719 
/* Option names for the PT-Scotch strategies; index i corresponds to the flag returned by PTScotch_Strategy(i) */
static const char *const
PTScotchStrategyList[] = {
  "DEFAULT",
  "QUALITY",
  "SPEED",
  "BALANCE",
  "SAFETY",
  "SCALABILITY",
  "RECURSIVE",
  "REMAP"
};
1731 
1732 #if defined(PETSC_HAVE_PTSCOTCH)
1733 
1734 EXTERN_C_BEGIN
1735 #include <ptscotch.h>
1736 EXTERN_C_END
1737 
1738 #define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0)
1739 
1740 static int PTScotch_Strategy(PetscInt strategy)
1741 {
1742   switch (strategy) {
1743   case  0: return SCOTCH_STRATDEFAULT;
1744   case  1: return SCOTCH_STRATQUALITY;
1745   case  2: return SCOTCH_STRATSPEED;
1746   case  3: return SCOTCH_STRATBALANCE;
1747   case  4: return SCOTCH_STRATSAFETY;
1748   case  5: return SCOTCH_STRATSCALABILITY;
1749   case  6: return SCOTCH_STRATRECURSIVE;
1750   case  7: return SCOTCH_STRATREMAP;
1751   default: return SCOTCH_STRATDEFAULT;
1752   }
1753 }
1754 
1755 
/*
  PTScotch_PartGraph_Seq - Partition a serial CSR graph into nparts pieces with SCOTCH.

  Input:
  + strategy  - SCOTCH_STRAT* flag (see PTScotch_Strategy())
  . imbalance - load imbalance tolerance for the mapping strategy
  . n         - number of vertices
  . xadj      - CSR row offsets (n+1 entries); xadj[n] is the number of edges
  . adjncy    - CSR adjacency lists
  . vtxwgt    - optional vertex weights (may be disabled by option, see below)
  . adjwgt    - optional edge weights (NULL here means unweighted edges)
  - nparts    - number of partitions requested

  Output:
  . part - partition assignment for each vertex (caller-allocated, n entries)
*/
static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
                                             SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[])
{
  SCOTCH_Graph   grafdat;
  SCOTCH_Strat   stradat;
  SCOTCH_Num     vertnbr = n;
  SCOTCH_Num     edgenbr = xadj[n];
  SCOTCH_Num*    velotab = vtxwgt;
  SCOTCH_Num*    edlotab = adjwgt;
  SCOTCH_Num     flagval = strategy;
  double         kbalval = imbalance;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  {
    /* Allow disabling vertex weights from the command line.
       NOTE(review): the option name misspells "partitioner" ("partititoner"); fixing it
       would change the user-visible option, so it is left as is. */
    PetscBool flg = PETSC_TRUE;
    ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
    if (!flg) velotab = NULL;
  }
  /* Build the SCOTCH graph, the mapping strategy, then compute the partition */
  ierr = SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
#if defined(PETSC_USE_DEBUG)
  /* Validate graph consistency in debug builds only (it is expensive) */
  ierr = SCOTCH_graphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
#endif
  ierr = SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
  SCOTCH_stratExit(&stradat);
  SCOTCH_graphExit(&grafdat);
  PetscFunctionReturn(0);
}
1787 
1788 static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1789                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[], MPI_Comm comm)
1790 {
1791   PetscMPIInt     procglbnbr;
1792   PetscMPIInt     proclocnum;
1793   SCOTCH_Arch     archdat;
1794   SCOTCH_Dgraph   grafdat;
1795   SCOTCH_Dmapping mappdat;
1796   SCOTCH_Strat    stradat;
1797   SCOTCH_Num      vertlocnbr;
1798   SCOTCH_Num      edgelocnbr;
1799   SCOTCH_Num*     veloloctab = vtxwgt;
1800   SCOTCH_Num*     edloloctab = adjwgt;
1801   SCOTCH_Num      flagval = strategy;
1802   double          kbalval = imbalance;
1803   PetscErrorCode  ierr;
1804 
1805   PetscFunctionBegin;
1806   {
1807     PetscBool flg = PETSC_TRUE;
1808     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
1809     if (!flg) veloloctab = NULL;
1810   }
1811   ierr = MPI_Comm_size(comm, &procglbnbr);CHKERRQ(ierr);
1812   ierr = MPI_Comm_rank(comm, &proclocnum);CHKERRQ(ierr);
1813   vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
1814   edgelocnbr = xadj[vertlocnbr];
1815 
1816   ierr = SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr);
1817   ierr = SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr);
1818 #if defined(PETSC_USE_DEBUG)
1819   ierr = SCOTCH_dgraphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1820 #endif
1821   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1822   ierr = SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRQ(ierr);
1823   ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
1824   ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
1825   ierr = SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr);
1826 
1827   ierr = SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr);
1828   SCOTCH_dgraphMapExit(&grafdat, &mappdat);
1829   SCOTCH_archExit(&archdat);
1830   SCOTCH_stratExit(&stradat);
1831   SCOTCH_dgraphExit(&grafdat);
1832   PetscFunctionReturn(0);
1833 }
1834 
1835 #endif /* PETSC_HAVE_PTSCOTCH */
1836 
1837 static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
1838 {
1839   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1840   PetscErrorCode             ierr;
1841 
1842   PetscFunctionBegin;
1843   ierr = PetscFree(p);CHKERRQ(ierr);
1844   PetscFunctionReturn(0);
1845 }
1846 
1847 static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer)
1848 {
1849   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1850   PetscErrorCode            ierr;
1851 
1852   PetscFunctionBegin;
1853   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1854   ierr = PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);CHKERRQ(ierr);
1855   ierr = PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n",(double)p->imbalance);CHKERRQ(ierr);
1856   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1857   PetscFunctionReturn(0);
1858 }
1859 
1860 static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
1861 {
1862   PetscBool      iascii;
1863   PetscErrorCode ierr;
1864 
1865   PetscFunctionBegin;
1866   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1867   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1868   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1869   if (iascii) {ierr = PetscPartitionerView_PTScotch_Ascii(part, viewer);CHKERRQ(ierr);}
1870   PetscFunctionReturn(0);
1871 }
1872 
1873 static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1874 {
1875   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1876   const char *const         *slist = PTScotchStrategyList;
1877   PetscInt                  nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0]));
1878   PetscBool                 flag;
1879   PetscErrorCode            ierr;
1880 
1881   PetscFunctionBegin;
1882   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");CHKERRQ(ierr);
1883   ierr = PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);CHKERRQ(ierr);
1884   ierr = PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);CHKERRQ(ierr);
1885   ierr = PetscOptionsTail();CHKERRQ(ierr);
1886   PetscFunctionReturn(0);
1887 }
1888 
1889 static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1890 {
1891 #if defined(PETSC_HAVE_PTSCOTCH)
1892   MPI_Comm       comm       = PetscObjectComm((PetscObject)part);
1893   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1894   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1895   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1896   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1897   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1898   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1899   PetscInt       v, i, *assignment, *points;
1900   PetscMPIInt    size, rank, p;
1901   PetscErrorCode ierr;
1902 
1903   PetscFunctionBegin;
1904   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1905   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1906   ierr = PetscMalloc2(size+1,&vtxdist,PetscMax(nvtxs,1),&assignment);CHKERRQ(ierr);
1907 
1908   /* Calculate vertex distribution */
1909   vtxdist[0] = 0;
1910   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1911   for (p = 2; p <= size; ++p) {
1912     vtxdist[p] += vtxdist[p-1];
1913   }
1914 
1915   if (nparts == 1) {
1916     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1917   } else {
1918     PetscSection section;
1919     /* Weight cells by dofs on cell by default */
1920     ierr = PetscMalloc1(PetscMax(nvtxs,1),&vwgt);CHKERRQ(ierr);
1921     ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
1922     if (section) {
1923       PetscInt vStart, eEnd, dof;
1924       ierr = DMPlexGetHeightStratum(dm, 0, &vStart, &eEnd);CHKERRQ(ierr);
1925       for (v = vStart; v < eEnd; ++v) {
1926         ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1927         /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1928         /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1929         if (v-vStart < numVertices) vwgt[v-vStart] = PetscMax(dof, 1);
1930       }
1931     } else {
1932       for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1933     }
1934     {
1935       PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
1936       int                       strat = PTScotch_Strategy(pts->strategy);
1937       double                    imbal = (double)pts->imbalance;
1938 
1939       for (p = 0; !vtxdist[p+1] && p < size; ++p);
1940       if (vtxdist[p+1] == vtxdist[size]) {
1941         if (rank == p) {
1942           ierr = PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, assignment);CHKERRQ(ierr);
1943         }
1944       } else {
1945         ierr = PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, assignment, comm);CHKERRQ(ierr);
1946       }
1947     }
1948     ierr = PetscFree(vwgt);CHKERRQ(ierr);
1949   }
1950 
1951   /* Convert to PetscSection+IS */
1952   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1953   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1954   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1955   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1956   for (p = 0, i = 0; p < nparts; ++p) {
1957     for (v = 0; v < nvtxs; ++v) {
1958       if (assignment[v] == p) points[i++] = v;
1959     }
1960   }
1961   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1962   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1963 
1964   ierr = PetscFree2(vtxdist,assignment);CHKERRQ(ierr);
1965   PetscFunctionReturn(0);
1966 #else
1967   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
1968 #endif
1969 }
1970 
1971 static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
1972 {
1973   PetscFunctionBegin;
1974   part->ops->view           = PetscPartitionerView_PTScotch;
1975   part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
1976   part->ops->partition      = PetscPartitionerPartition_PTScotch;
1977   part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
1978   PetscFunctionReturn(0);
1979 }
1980 
1981 /*MC
1982   PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library
1983 
1984   Level: intermediate
1985 
1986 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1987 M*/
1988 
1989 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
1990 {
1991   PetscPartitioner_PTScotch *p;
1992   PetscErrorCode          ierr;
1993 
1994   PetscFunctionBegin;
1995   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1996   ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
1997   part->data = p;
1998 
1999   p->strategy  = 0;
2000   p->imbalance = 0.01;
2001 
2002   ierr = PetscPartitionerInitialize_PTScotch(part);CHKERRQ(ierr);
2003   ierr = PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);CHKERRQ(ierr);
2004   PetscFunctionReturn(0);
2005 }
2006 
2007 
2008 /*@
2009   DMPlexGetPartitioner - Get the mesh partitioner
2010 
2011   Not collective
2012 
2013   Input Parameter:
2014 . dm - The DM
2015 
2016   Output Parameter:
2017 . part - The PetscPartitioner
2018 
2019   Level: developer
2020 
2021   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
2022 
2023 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
2024 @*/
2025 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
2026 {
2027   DM_Plex       *mesh = (DM_Plex *) dm->data;
2028 
2029   PetscFunctionBegin;
2030   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2031   PetscValidPointer(part, 2);
2032   *part = mesh->partitioner;
2033   PetscFunctionReturn(0);
2034 }
2035 
2036 /*@
2037   DMPlexSetPartitioner - Set the mesh partitioner
2038 
2039   logically collective on dm and part
2040 
2041   Input Parameters:
2042 + dm - The DM
2043 - part - The partitioner
2044 
2045   Level: developer
2046 
2047   Note: Any existing PetscPartitioner will be destroyed.
2048 
2049 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
2050 @*/
2051 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
2052 {
2053   DM_Plex       *mesh = (DM_Plex *) dm->data;
2054   PetscErrorCode ierr;
2055 
2056   PetscFunctionBegin;
2057   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2058   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
2059   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
2060   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
2061   mesh->partitioner = part;
2062   PetscFunctionReturn(0);
2063 }
2064 
/*
  DMPlexAddClosure_Tree - Add to the hash set ht the points reachable from 'point' through
  the mesh refinement tree: ancestors' closures when 'up' is set, descendants when 'down'
  is set. Recursion goes only upward from ancestors and only downward from children, so
  each branch of the tree is walked once.
*/
static PetscErrorCode DMPlexAddClosure_Tree(DM dm, PetscHSetI ht, PetscInt point, PetscBool up, PetscBool down)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (up) {
    PetscInt parent;

    ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
    if (parent != point) { /* a point with no parent is its own parent */
      PetscInt closureSize, *closure = NULL, i;

      ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
      for (i = 0; i < closureSize; i++) {
        PetscInt cpoint = closure[2*i]; /* closure entries are (point, orientation) pairs */

        ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
        /* Continue upward only: descendants of ancestors are handled by the caller's 'down' pass */
        ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
      }
      ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
    }
  }
  if (down) {
    PetscInt numChildren;
    const PetscInt *children;

    ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
    if (numChildren) {
      PetscInt i;

      for (i = 0; i < numChildren; i++) {
        PetscInt cpoint = children[i];

        ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
        /* Continue downward only, to collect the whole subtree below this child */
        ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}
2105 
2106 PETSC_INTERN PetscErrorCode DMPlexPartitionLabelClosure_Private(DM,DMLabel,PetscInt,PetscInt,const PetscInt[],IS*);
2107 
/*
  DMPlexPartitionLabelClosure_Private - Compute the transitive closure of a set of points,
  including tree (non-conforming mesh) relatives when the mesh has a refinement tree, and
  return the result as a sorted IS.

  The 'label' and 'rank' arguments are currently unused here; the caller
  (DMPlexPartitionLabelClosure) stores the resulting IS back into the label itself.
  The returned IS owns its index array (PETSC_OWN_POINTER).
*/
PetscErrorCode DMPlexPartitionLabelClosure_Private(DM dm, DMLabel label, PetscInt rank, PetscInt numPoints, const PetscInt points[], IS *closureIS)
{
  DM_Plex        *mesh = (DM_Plex *)dm->data;
  /* The mesh has a refinement tree iff either tree section is present */
  PetscBool      hasTree = (mesh->parentSection || mesh->childSection) ? PETSC_TRUE : PETSC_FALSE;
  PetscInt       *closure = NULL, closureSize, nelems, *elems, off = 0, p, c;
  PetscHSetI     ht;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
  /* Heuristic pre-size to limit rehashing */
  ierr = PetscHSetIResize(ht, numPoints*16);CHKERRQ(ierr);
  for (p = 0; p < numPoints; ++p) {
    ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
    for (c = 0; c < closureSize*2; c += 2) { /* closure entries are (point, orientation) pairs */
      ierr = PetscHSetIAdd(ht, closure[c]);CHKERRQ(ierr);
      if (hasTree) {ierr = DMPlexAddClosure_Tree(dm, ht, closure[c], PETSC_TRUE, PETSC_TRUE);CHKERRQ(ierr);}
    }
  }
  /* The closure work array is reused across iterations; restore it once at the end */
  if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure);CHKERRQ(ierr);}
  ierr = PetscHSetIGetSize(ht, &nelems);CHKERRQ(ierr);
  ierr = PetscMalloc1(nelems, &elems);CHKERRQ(ierr);
  ierr = PetscHSetIGetElems(ht, &off, elems);CHKERRQ(ierr);
  ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
  ierr = PetscSortInt(nelems, elems);CHKERRQ(ierr);
  /* elems ownership is transferred to the IS */
  ierr = ISCreateGeneral(PETSC_COMM_SELF, nelems, elems, PETSC_OWN_POINTER, closureIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2135 
2136 /*@
2137   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
2138 
2139   Input Parameters:
2140 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2142 
2143   Level: developer
2144 
2145 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2146 @*/
2147 PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
2148 {
2149   IS              rankIS,   pointIS, closureIS;
2150   const PetscInt *ranks,   *points;
2151   PetscInt        numRanks, numPoints, r;
2152   PetscErrorCode  ierr;
2153 
2154   PetscFunctionBegin;
2155   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2156   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2157   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2158   for (r = 0; r < numRanks; ++r) {
2159     const PetscInt rank = ranks[r];
2160     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2161     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2162     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2163     ierr = DMPlexPartitionLabelClosure_Private(dm, label, rank, numPoints, points, &closureIS);CHKERRQ(ierr);
2164     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2165     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2166     ierr = DMLabelSetStratumIS(label, rank, closureIS);CHKERRQ(ierr);
2167     ierr = ISDestroy(&closureIS);CHKERRQ(ierr);
2168   }
2169   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2170   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2171   PetscFunctionReturn(0);
2172 }
2173 
2174 /*@
2175   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
2176 
2177   Input Parameters:
2178 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2180 
2181   Level: developer
2182 
2183 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2184 @*/
2185 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
2186 {
2187   IS              rankIS,   pointIS;
2188   const PetscInt *ranks,   *points;
2189   PetscInt        numRanks, numPoints, r, p, a, adjSize;
2190   PetscInt       *adj = NULL;
2191   PetscErrorCode  ierr;
2192 
2193   PetscFunctionBegin;
2194   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2195   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2196   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2197   for (r = 0; r < numRanks; ++r) {
2198     const PetscInt rank = ranks[r];
2199 
2200     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2201     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2202     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2203     for (p = 0; p < numPoints; ++p) {
2204       adjSize = PETSC_DETERMINE;
2205       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
2206       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
2207     }
2208     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2209     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2210   }
2211   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2212   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2213   ierr = PetscFree(adj);CHKERRQ(ierr);
2214   PetscFunctionReturn(0);
2215 }
2216 
2217 /*@
2218   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
2219 
2220   Input Parameters:
2221 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2223 
2224   Level: developer
2225 
2226   Note: This is required when generating multi-level overlaps to capture
2227   overlap points from non-neighbouring partitions.
2228 
2229 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2230 @*/
2231 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
2232 {
2233   MPI_Comm        comm;
2234   PetscMPIInt     rank;
2235   PetscSF         sfPoint;
2236   DMLabel         lblRoots, lblLeaves;
2237   IS              rankIS, pointIS;
2238   const PetscInt *ranks;
2239   PetscInt        numRanks, r;
2240   PetscErrorCode  ierr;
2241 
2242   PetscFunctionBegin;
2243   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2244   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2245   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
2246   /* Pull point contributions from remote leaves into local roots */
2247   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
2248   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
2249   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2250   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2251   for (r = 0; r < numRanks; ++r) {
2252     const PetscInt remoteRank = ranks[r];
2253     if (remoteRank == rank) continue;
2254     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
2255     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2256     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2257   }
2258   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2259   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2260   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
2261   /* Push point contributions from roots into remote leaves */
2262   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
2263   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
2264   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2265   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2266   for (r = 0; r < numRanks; ++r) {
2267     const PetscInt remoteRank = ranks[r];
2268     if (remoteRank == rank) continue;
2269     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
2270     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2271     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2272   }
2273   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2274   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2275   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
2276   PetscFunctionReturn(0);
2277 }
2278 
2279 /*@
2280   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
2281 
2282   Input Parameters:
+ dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots
2289 
2290   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
2291   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
2292 
2293   Level: developer
2294 
2295 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2296 @*/
2297 PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
2298 {
2299   MPI_Comm           comm;
2300   PetscMPIInt        rank, size, r;
2301   PetscInt           p, n, numNeighbors, numPoints, dof, off, rootSize, l, nleaves, leafSize;
2302   PetscSF            sfPoint;
2303   PetscSection       rootSection;
2304   PetscSFNode       *rootPoints, *leafPoints;
2305   const PetscSFNode *remote;
2306   const PetscInt    *local, *neighbors;
2307   IS                 valueIS;
2308   PetscBool          mpiOverflow = PETSC_FALSE;
2309   PetscErrorCode     ierr;
2310 
2311   PetscFunctionBegin;
2312   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2313   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2314   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2315   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
2316 
2317   /* Convert to (point, rank) and use actual owners */
2318   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
2319   ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
2320   ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
2321   ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
2322   ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
2323   for (n = 0; n < numNeighbors; ++n) {
2324     ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
2325     ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
2326   }
2327   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
2328   ierr = PetscSectionGetStorageSize(rootSection, &rootSize);CHKERRQ(ierr);
2329   ierr = PetscMalloc1(rootSize, &rootPoints);CHKERRQ(ierr);
2330   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
2331   for (n = 0; n < numNeighbors; ++n) {
2332     IS              pointIS;
2333     const PetscInt *points;
2334 
2335     ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
2336     ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
2337     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2338     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2339     for (p = 0; p < numPoints; ++p) {
2340       if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
2341       else       {l = -1;}
2342       if (l >= 0) {rootPoints[off+p] = remote[l];}
2343       else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
2344     }
2345     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2346     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2347   }
2348 
2349   /* Try to communicate overlap using All-to-All */
2350   if (!processSF) {
2351     PetscInt64  counter = 0;
2352     PetscBool   locOverflow = PETSC_FALSE;
2353     PetscMPIInt *scounts, *sdispls, *rcounts, *rdispls;
2354 
2355     ierr = PetscCalloc4(size, &scounts, size, &sdispls, size, &rcounts, size, &rdispls);CHKERRQ(ierr);
2356     for (n = 0; n < numNeighbors; ++n) {
2357       ierr = PetscSectionGetDof(rootSection, neighbors[n], &dof);CHKERRQ(ierr);
2358       ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
2359 #if defined(PETSC_USE_64BIT_INDICES)
2360       if (dof > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
2361       if (off > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
2362 #endif
2363       scounts[neighbors[n]] = (PetscMPIInt) dof;
2364       sdispls[neighbors[n]] = (PetscMPIInt) off;
2365     }
2366     ierr = MPI_Alltoall(scounts, 1, MPI_INT, rcounts, 1, MPI_INT, comm);CHKERRQ(ierr);
2367     for (r = 0; r < size; ++r) { rdispls[r] = (int)counter; counter += rcounts[r]; }
2368     if (counter > PETSC_MPI_INT_MAX) locOverflow = PETSC_TRUE;
2369     ierr = MPI_Allreduce(&locOverflow, &mpiOverflow, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
2370     if (!mpiOverflow) {
2371       leafSize = (PetscInt) counter;
2372       ierr = PetscMalloc1(leafSize, &leafPoints);CHKERRQ(ierr);
2373       ierr = MPI_Alltoallv(rootPoints, scounts, sdispls, MPIU_2INT, leafPoints, rcounts, rdispls, MPIU_2INT, comm);CHKERRQ(ierr);
2374     }
2375     ierr = PetscFree4(scounts, sdispls, rcounts, rdispls);CHKERRQ(ierr);
2376   }
2377 
2378   /* Communicate overlap using process star forest */
2379   if (processSF || mpiOverflow) {
2380     PetscSF      procSF;
2381     PetscSFNode  *remote;
2382     PetscSection leafSection;
2383 
2384     if (processSF) {
2385       ierr = PetscObjectReference((PetscObject)processSF);CHKERRQ(ierr);
2386       procSF = processSF;
2387     } else {
2388       ierr = PetscMalloc1(size, &remote);CHKERRQ(ierr);
2389       for (r = 0; r < size; ++r) { remote[r].rank  = r; remote[r].index = rank; }
2390       ierr = PetscSFCreate(comm, &procSF);CHKERRQ(ierr);
2391       ierr = PetscSFSetGraph(procSF, size, size, NULL, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
2392     }
2393 
2394     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dm), &leafSection);CHKERRQ(ierr);
2395     ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
2396     ierr = PetscSectionGetStorageSize(leafSection, &leafSize);CHKERRQ(ierr);
2397     ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
2398     ierr = PetscSFDestroy(&procSF);CHKERRQ(ierr);
2399   }
2400 
2401   for (p = 0; p < leafSize; p++) {
2402     ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
2403   }
2404 
2405   ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
2406   ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
2407   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
2408   ierr = PetscFree(rootPoints);CHKERRQ(ierr);
2409   ierr = PetscFree(leafPoints);CHKERRQ(ierr);
2410   PetscFunctionReturn(0);
2411 }
2412 
2413 /*@
2414   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
2415 
2416   Input Parameters:
2417 + dm    - The DM
- label - DMLabel assigning ranks to remote roots

  Output Parameter:
. sf    - The star forest communication context encapsulating the defined mapping
2422 
2423   Note: The incoming label is a receiver mapping of remote points to their parent rank.
2424 
2425   Level: developer
2426 
2427 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
2428 @*/
2429 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
2430 {
2431   PetscMPIInt     rank, size;
2432   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
2433   PetscSFNode    *remotePoints;
2434   IS              remoteRootIS;
2435   const PetscInt *remoteRoots;
2436   PetscErrorCode ierr;
2437 
2438   PetscFunctionBegin;
2439   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
2440   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
2441 
2442   for (numRemote = 0, n = 0; n < size; ++n) {
2443     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
2444     numRemote += numPoints;
2445   }
2446   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
2447   /* Put owned points first */
2448   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
2449   if (numPoints > 0) {
2450     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
2451     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2452     for (p = 0; p < numPoints; p++) {
2453       remotePoints[idx].index = remoteRoots[p];
2454       remotePoints[idx].rank = rank;
2455       idx++;
2456     }
2457     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2458     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
2459   }
2460   /* Now add remote points */
2461   for (n = 0; n < size; ++n) {
2462     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
2463     if (numPoints <= 0 || n == rank) continue;
2464     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
2465     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2466     for (p = 0; p < numPoints; p++) {
2467       remotePoints[idx].index = remoteRoots[p];
2468       remotePoints[idx].rank = n;
2469       idx++;
2470     }
2471     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2472     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
2473   }
2474   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
2475   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
2476   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
2477   PetscFunctionReturn(0);
2478 }
2479 
2480 /* The two functions below are used by DMPlexRebalanceSharedPoints which errors
2481  * when PETSc is built without ParMETIS. To avoid -Wunused-function, we take
2482  * them out in that case. */
2483 #if defined(PETSC_HAVE_PARMETIS)
2484 /*@C
2485 
2486   DMPlexRewriteSF - Rewrites the ownership of the SF of a DM (in place).
2487 
2488   Input parameters:
  + dm                - The DMPlex object.
  . n                 - The number of points.
  . pointsToRewrite   - The points in the SF whose ownership will change.
  . targetOwners      - New owner for each element in pointsToRewrite.
  - degrees           - Degrees of the points in the SF as obtained by PetscSFComputeDegreeBegin/PetscSFComputeDegreeEnd.
2494 
2495   Level: developer
2496 
2497 @*/
2498 static PetscErrorCode DMPlexRewriteSF(DM dm, PetscInt n, PetscInt *pointsToRewrite, PetscInt *targetOwners, const PetscInt *degrees)
2499 {
2500   PetscInt      ierr, pStart, pEnd, i, j, counter, leafCounter, sumDegrees, nroots, nleafs;
2501   PetscInt     *cumSumDegrees, *newOwners, *newNumbers, *rankOnLeafs, *locationsOfLeafs, *remoteLocalPointOfLeafs, *points, *leafsNew;
2502   PetscSFNode  *leafLocationsNew;
2503   const         PetscSFNode *iremote;
2504   const         PetscInt *ilocal;
2505   PetscBool    *isLeaf;
2506   PetscSF       sf;
2507   MPI_Comm      comm;
2508   PetscMPIInt   rank, size;
2509 
2510   PetscFunctionBegin;
2511   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2512   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2513   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2514   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
2515 
2516   ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
2517   ierr = PetscSFGetGraph(sf, &nroots, &nleafs, &ilocal, &iremote); CHKERRQ(ierr);
2518   ierr = PetscMalloc1(pEnd-pStart, &isLeaf);CHKERRQ(ierr);
2519   for (i=0; i<pEnd-pStart; i++) {
2520     isLeaf[i] = PETSC_FALSE;
2521   }
2522   for (i=0; i<nleafs; i++) {
2523     isLeaf[ilocal[i]-pStart] = PETSC_TRUE;
2524   }
2525 
2526   ierr = PetscMalloc1(pEnd-pStart+1, &cumSumDegrees);CHKERRQ(ierr);
2527   cumSumDegrees[0] = 0;
2528   for (i=1; i<=pEnd-pStart; i++) {
2529     cumSumDegrees[i] = cumSumDegrees[i-1] + degrees[i-1];
2530   }
2531   sumDegrees = cumSumDegrees[pEnd-pStart];
2532   /* get the location of my leafs (we have sumDegrees many leafs pointing at our roots) */
2533 
2534   ierr = PetscMalloc1(sumDegrees, &locationsOfLeafs);CHKERRQ(ierr);
2535   ierr = PetscMalloc1(pEnd-pStart, &rankOnLeafs);CHKERRQ(ierr);
2536   for (i=0; i<pEnd-pStart; i++) {
2537     rankOnLeafs[i] = rank;
2538   }
2539   ierr = PetscSFGatherBegin(sf, MPIU_INT, rankOnLeafs, locationsOfLeafs);CHKERRQ(ierr);
2540   ierr = PetscSFGatherEnd(sf, MPIU_INT, rankOnLeafs, locationsOfLeafs);CHKERRQ(ierr);
2541   ierr = PetscFree(rankOnLeafs);CHKERRQ(ierr);
2542 
2543   /* get the remote local points of my leaves */
2544   ierr = PetscMalloc1(sumDegrees, &remoteLocalPointOfLeafs);CHKERRQ(ierr);
2545   ierr = PetscMalloc1(pEnd-pStart, &points);CHKERRQ(ierr);
2546   for (i=0; i<pEnd-pStart; i++) {
2547     points[i] = pStart+i;
2548   }
2549   ierr = PetscSFGatherBegin(sf, MPIU_INT, points, remoteLocalPointOfLeafs);CHKERRQ(ierr);
2550   ierr = PetscSFGatherEnd(sf, MPIU_INT, points, remoteLocalPointOfLeafs);CHKERRQ(ierr);
2551   ierr = PetscFree(points);CHKERRQ(ierr);
2552   /* Figure out the new owners of the vertices that are up for grabs and their numbers on the new owners */
2553   ierr = PetscMalloc1(pEnd-pStart, &newOwners);CHKERRQ(ierr);
2554   ierr = PetscMalloc1(pEnd-pStart, &newNumbers);CHKERRQ(ierr);
2555   for (i=0; i<pEnd-pStart; i++) {
2556     newOwners[i] = -1;
2557     newNumbers[i] = -1;
2558   }
2559   {
2560     PetscInt oldNumber, newNumber, oldOwner, newOwner;
2561     for (i=0; i<n; i++) {
2562       oldNumber = pointsToRewrite[i];
2563       newNumber = -1;
2564       oldOwner = rank;
2565       newOwner = targetOwners[i];
2566       if (oldOwner == newOwner) {
2567         newNumber = oldNumber;
2568       } else {
2569         for (j=0; j<degrees[oldNumber]; j++) {
2570           if (locationsOfLeafs[cumSumDegrees[oldNumber]+j] == newOwner) {
2571             newNumber = remoteLocalPointOfLeafs[cumSumDegrees[oldNumber]+j];
2572             break;
2573           }
2574         }
2575       }
2576       if (newNumber == -1) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Couldn't find the new owner of vertex.");
2577 
2578       newOwners[oldNumber] = newOwner;
2579       newNumbers[oldNumber] = newNumber;
2580     }
2581   }
2582   ierr = PetscFree(cumSumDegrees);CHKERRQ(ierr);
2583   ierr = PetscFree(locationsOfLeafs);CHKERRQ(ierr);
2584   ierr = PetscFree(remoteLocalPointOfLeafs);CHKERRQ(ierr);
2585 
2586   ierr = PetscSFBcastBegin(sf, MPIU_INT, newOwners, newOwners);CHKERRQ(ierr);
2587   ierr = PetscSFBcastEnd(sf, MPIU_INT, newOwners, newOwners);CHKERRQ(ierr);
2588   ierr = PetscSFBcastBegin(sf, MPIU_INT, newNumbers, newNumbers);CHKERRQ(ierr);
2589   ierr = PetscSFBcastEnd(sf, MPIU_INT, newNumbers, newNumbers);CHKERRQ(ierr);
2590 
2591   /* Now count how many leafs we have on each processor. */
2592   leafCounter=0;
2593   for (i=0; i<pEnd-pStart; i++) {
2594     if (newOwners[i] >= 0) {
2595       if (newOwners[i] != rank) {
2596         leafCounter++;
2597       }
2598     } else {
2599       if (isLeaf[i]) {
2600         leafCounter++;
2601       }
2602     }
2603   }
2604 
2605   /* Now set up the new sf by creating the leaf arrays */
2606   ierr = PetscMalloc1(leafCounter, &leafsNew);CHKERRQ(ierr);
2607   ierr = PetscMalloc1(leafCounter, &leafLocationsNew);CHKERRQ(ierr);
2608 
2609   leafCounter = 0;
2610   counter = 0;
2611   for (i=0; i<pEnd-pStart; i++) {
2612     if (newOwners[i] >= 0) {
2613       if (newOwners[i] != rank) {
2614         leafsNew[leafCounter] = i;
2615         leafLocationsNew[leafCounter].rank = newOwners[i];
2616         leafLocationsNew[leafCounter].index = newNumbers[i];
2617         leafCounter++;
2618       }
2619     } else {
2620       if (isLeaf[i]) {
2621         leafsNew[leafCounter] = i;
2622         leafLocationsNew[leafCounter].rank = iremote[counter].rank;
2623         leafLocationsNew[leafCounter].index = iremote[counter].index;
2624         leafCounter++;
2625       }
2626     }
2627     if (isLeaf[i]) {
2628       counter++;
2629     }
2630   }
2631 
2632   ierr = PetscSFSetGraph(sf, nroots, leafCounter, leafsNew, PETSC_OWN_POINTER, leafLocationsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
2633   ierr = PetscFree(newOwners);CHKERRQ(ierr);
2634   ierr = PetscFree(newNumbers);CHKERRQ(ierr);
2635   ierr = PetscFree(isLeaf);CHKERRQ(ierr);
2636   PetscFunctionReturn(0);
2637 }
2638 
2639 static PetscErrorCode DMPlexViewDistribution(MPI_Comm comm, PetscInt n, PetscInt skip, PetscInt *vtxwgt, PetscInt *part, PetscViewer viewer)
2640 {
2641   PetscInt *distribution, min, max, sum, i, ierr;
2642   PetscMPIInt rank, size;
2643   PetscFunctionBegin;
2644   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2645   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2646   ierr = PetscCalloc1(size, &distribution);CHKERRQ(ierr);
2647   for (i=0; i<n; i++) {
2648     if (part) distribution[part[i]] += vtxwgt[skip*i];
2649     else distribution[rank] += vtxwgt[skip*i];
2650   }
2651   ierr = MPI_Allreduce(MPI_IN_PLACE, distribution, size, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
2652   min = distribution[0];
2653   max = distribution[0];
2654   sum = distribution[0];
2655   for (i=1; i<size; i++) {
2656     if (distribution[i]<min) min=distribution[i];
2657     if (distribution[i]>max) max=distribution[i];
2658     sum += distribution[i];
2659   }
2660   ierr = PetscViewerASCIIPrintf(viewer, "Min: %D, Avg: %D, Max: %D, Balance: %f\n", min, sum/size, max, (max*1.*size)/sum);CHKERRQ(ierr);
2661   ierr = PetscFree(distribution);CHKERRQ(ierr);
2662   PetscFunctionReturn(0);
2663 }
2664 
2665 #endif
2666 
2667 /*@
  DMPlexRebalanceSharedPoints - Redistribute points in the plex that are shared in order to achieve better balancing. This routine updates the PointSF of the DM in place.
2669 
2670   Input parameters:
  + dm               - The DMPlex object.
  . entityDepth      - depth of the entity to balance (0 -> balance vertices).
  . useInitialGuess  - whether to use the current distribution as initial guess (only used by ParMETIS).
  - parallel         - whether to use ParMETIS and do the partition in parallel or whether to gather the graph onto a single process and use METIS.

  Output parameters:
  . success          - whether the graph partitioning was successful or not. If not, try useInitialGuess=True and parallel=True.
2678 
2679   Level: user
2680 
2681 @*/
2682 
2683 PetscErrorCode DMPlexRebalanceSharedPoints(DM dm, PetscInt entityDepth, PetscBool useInitialGuess, PetscBool parallel, PetscBool *success)
2684 {
2685 #if defined(PETSC_HAVE_PARMETIS)
2686   PetscSF     sf;
2687   PetscInt    ierr, i, j, idx, jdx;
2688   PetscInt    eBegin, eEnd, nroots, nleafs, pStart, pEnd;
2689   const       PetscInt *degrees, *ilocal;
2690   const       PetscSFNode *iremote;
2691   PetscBool   *toBalance, *isLeaf, *isExclusivelyOwned, *isNonExclusivelyOwned;
2692   PetscInt    numExclusivelyOwned, numNonExclusivelyOwned;
2693   PetscMPIInt rank, size;
2694   PetscInt    *globalNumbersOfLocalOwnedVertices, *leafGlobalNumbers;
2695   const       PetscInt *cumSumVertices;
2696   PetscInt    offset, counter;
2697   PetscInt    lenadjncy;
2698   PetscInt    *xadj, *adjncy, *vtxwgt;
2699   PetscInt    lenxadj;
2700   PetscInt    *adjwgt = NULL;
2701   PetscInt    *part, *options;
2702   PetscInt    nparts, wgtflag, numflag, ncon, edgecut;
2703   real_t      *ubvec;
2704   PetscInt    *firstVertices, *renumbering;
2705   PetscInt    failed, failedGlobal;
2706   MPI_Comm    comm;
2707   Mat         A, Apre;
2708   const char *prefix = NULL;
2709   PetscViewer       viewer;
2710   PetscViewerFormat format;
2711   PetscLayout layout;
2712 
2713   PetscFunctionBegin;
2714   if (success) *success = PETSC_FALSE;
2715   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2716   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2717   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2718   if (size==1) PetscFunctionReturn(0);
2719 
2720   ierr = PetscLogEventBegin(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
2721 
2722   ierr = PetscOptionsGetViewer(comm,((PetscObject)dm)->options, prefix,"-dm_rebalance_partition_view",&viewer,&format,NULL);CHKERRQ(ierr);
2723   if (viewer) {
2724     ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr);
2725   }
2726 
2727   /* Figure out all points in the plex that we are interested in balancing. */
2728   ierr = DMPlexGetDepthStratum(dm, entityDepth, &eBegin, &eEnd);CHKERRQ(ierr);
2729   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
2730   ierr = PetscMalloc1(pEnd-pStart, &toBalance);CHKERRQ(ierr);
2731 
2732   for (i=0; i<pEnd-pStart; i++) {
2733     toBalance[i] = (PetscBool)(i-pStart>=eBegin && i-pStart<eEnd);
2734   }
2735 
2736   /* There are three types of points:
2737    * exclusivelyOwned: points that are owned by this process and only seen by this process
2738    * nonExclusivelyOwned: points that are owned by this process but seen by at least another process
2739    * leaf: a point that is seen by this process but owned by a different process
2740    */
2741   ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
2742   ierr = PetscSFGetGraph(sf, &nroots, &nleafs, &ilocal, &iremote); CHKERRQ(ierr);
2743   ierr = PetscMalloc1(pEnd-pStart, &isLeaf);CHKERRQ(ierr);
2744   ierr = PetscMalloc1(pEnd-pStart, &isNonExclusivelyOwned);CHKERRQ(ierr);
2745   ierr = PetscMalloc1(pEnd-pStart, &isExclusivelyOwned);CHKERRQ(ierr);
2746   for (i=0; i<pEnd-pStart; i++) {
2747     isNonExclusivelyOwned[i] = PETSC_FALSE;
2748     isExclusivelyOwned[i] = PETSC_FALSE;
2749     isLeaf[i] = PETSC_FALSE;
2750   }
2751 
2752   /* start by marking all the leafs */
2753   for (i=0; i<nleafs; i++) {
2754     isLeaf[ilocal[i]-pStart] = PETSC_TRUE;
2755   }
2756 
2757   /* for an owned point, we can figure out whether another processor sees it or
2758    * not by calculating its degree */
2759   ierr = PetscSFComputeDegreeBegin(sf, &degrees);CHKERRQ(ierr);
2760   ierr = PetscSFComputeDegreeEnd(sf, &degrees);CHKERRQ(ierr);
2761 
2762   numExclusivelyOwned = 0;
2763   numNonExclusivelyOwned = 0;
2764   for (i=0; i<pEnd-pStart; i++) {
2765     if (toBalance[i]) {
2766       if (degrees[i] > 0) {
2767         isNonExclusivelyOwned[i] = PETSC_TRUE;
2768         numNonExclusivelyOwned += 1;
2769       } else {
2770         if (!isLeaf[i]) {
2771           isExclusivelyOwned[i] = PETSC_TRUE;
2772           numExclusivelyOwned += 1;
2773         }
2774       }
2775     }
2776   }
2777 
2778   /* We are going to build a graph with one vertex per core representing the
2779    * exclusively owned points and then one vertex per nonExclusively owned
2780    * point. */
2781 
2782   ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
2783   ierr = PetscLayoutSetLocalSize(layout, 1 + numNonExclusivelyOwned);CHKERRQ(ierr);
2784   ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
2785   ierr = PetscLayoutGetRanges(layout, &cumSumVertices);CHKERRQ(ierr);
2786 
2787   ierr = PetscMalloc1(pEnd-pStart, &globalNumbersOfLocalOwnedVertices);CHKERRQ(ierr);
2788   offset = cumSumVertices[rank];
2789   counter = 0;
2790   for (i=0; i<pEnd-pStart; i++) {
2791     if (toBalance[i]) {
2792       if (degrees[i] > 0) {
2793         globalNumbersOfLocalOwnedVertices[i] = counter + 1 + offset;
2794         counter++;
2795       }
2796     }
2797   }
2798 
2799   /* send the global numbers of vertices I own to the leafs so that they know to connect to it */
2800   ierr = PetscMalloc1(pEnd-pStart, &leafGlobalNumbers);CHKERRQ(ierr);
2801   ierr = PetscSFBcastBegin(sf, MPIU_INT, globalNumbersOfLocalOwnedVertices, leafGlobalNumbers);CHKERRQ(ierr);
2802   ierr = PetscSFBcastEnd(sf, MPIU_INT, globalNumbersOfLocalOwnedVertices, leafGlobalNumbers);CHKERRQ(ierr);
2803 
2804   /* Now start building the data structure for ParMETIS */
2805 
2806   ierr = MatCreate(comm, &Apre);CHKERRQ(ierr);
2807   ierr = MatSetType(Apre, MATPREALLOCATOR);CHKERRQ(ierr);
2808   ierr = MatSetSizes(Apre, 1+numNonExclusivelyOwned, 1+numNonExclusivelyOwned, cumSumVertices[size], cumSumVertices[size]);CHKERRQ(ierr);
2809   ierr = MatSetUp(Apre);CHKERRQ(ierr);
2810 
2811   for (i=0; i<pEnd-pStart; i++) {
2812     if (toBalance[i]) {
2813       idx = cumSumVertices[rank];
2814       if (isNonExclusivelyOwned[i]) jdx = globalNumbersOfLocalOwnedVertices[i];
2815       else if (isLeaf[i]) jdx = leafGlobalNumbers[i];
2816       else continue;
2817       ierr = MatSetValue(Apre, idx, jdx, 1, INSERT_VALUES);CHKERRQ(ierr);
2818       ierr = MatSetValue(Apre, jdx, idx, 1, INSERT_VALUES);CHKERRQ(ierr);
2819     }
2820   }
2821 
2822   ierr = MatAssemblyBegin(Apre, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2823   ierr = MatAssemblyEnd(Apre, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2824 
2825   ierr = MatCreate(comm, &A);CHKERRQ(ierr);
2826   ierr = MatSetType(A, MATMPIAIJ);CHKERRQ(ierr);
2827   ierr = MatSetSizes(A, 1+numNonExclusivelyOwned, 1+numNonExclusivelyOwned, cumSumVertices[size], cumSumVertices[size]);CHKERRQ(ierr);
2828   ierr = MatPreallocatorPreallocate(Apre, PETSC_FALSE, A);CHKERRQ(ierr);
2829   ierr = MatDestroy(&Apre);CHKERRQ(ierr);
2830 
2831   for (i=0; i<pEnd-pStart; i++) {
2832     if (toBalance[i]) {
2833       idx = cumSumVertices[rank];
2834       if (isNonExclusivelyOwned[i]) jdx = globalNumbersOfLocalOwnedVertices[i];
2835       else if (isLeaf[i]) jdx = leafGlobalNumbers[i];
2836       else continue;
2837       ierr = MatSetValue(A, idx, jdx, 1, INSERT_VALUES);CHKERRQ(ierr);
2838       ierr = MatSetValue(A, jdx, idx, 1, INSERT_VALUES);CHKERRQ(ierr);
2839     }
2840   }
2841 
2842   ierr = MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2843   ierr = MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2844   ierr = PetscFree(leafGlobalNumbers);CHKERRQ(ierr);
2845   ierr = PetscFree(globalNumbersOfLocalOwnedVertices);CHKERRQ(ierr);
2846 
2847   nparts = size;
2848   wgtflag = 2;
2849   numflag = 0;
2850   ncon = 2;
2851   real_t *tpwgts;
2852   ierr = PetscMalloc1(ncon * nparts, &tpwgts);CHKERRQ(ierr);
2853   for (i=0; i<ncon*nparts; i++) {
2854     tpwgts[i] = 1./(nparts);
2855   }
2856 
2857   ierr = PetscMalloc1(ncon, &ubvec);CHKERRQ(ierr);
2858   ubvec[0] = 1.01;
2859   ubvec[1] = 1.01;
2860   lenadjncy = 0;
2861   for (i=0; i<1+numNonExclusivelyOwned; i++) {
2862     PetscInt temp=0;
2863     ierr = MatGetRow(A, cumSumVertices[rank] + i, &temp, NULL, NULL);CHKERRQ(ierr);
2864     lenadjncy += temp;
2865     ierr = MatRestoreRow(A, cumSumVertices[rank] + i, &temp, NULL, NULL);CHKERRQ(ierr);
2866   }
2867   ierr = PetscMalloc1(lenadjncy, &adjncy);CHKERRQ(ierr);
2868   lenxadj = 2 + numNonExclusivelyOwned;
2869   ierr = PetscMalloc1(lenxadj, &xadj);CHKERRQ(ierr);
2870   xadj[0] = 0;
2871   counter = 0;
2872   for (i=0; i<1+numNonExclusivelyOwned; i++) {
2873     PetscInt        temp=0;
2874     const PetscInt *cols;
2875     ierr = MatGetRow(A, cumSumVertices[rank] + i, &temp, &cols, NULL);CHKERRQ(ierr);
2876     ierr = PetscMemcpy(&adjncy[counter], cols, temp*sizeof(PetscInt));CHKERRQ(ierr);
2877     counter += temp;
2878     xadj[i+1] = counter;
2879     ierr = MatRestoreRow(A, cumSumVertices[rank] + i, &temp, &cols, NULL);CHKERRQ(ierr);
2880   }
2881 
2882   ierr = PetscMalloc1(cumSumVertices[rank+1]-cumSumVertices[rank], &part);CHKERRQ(ierr);
2883   ierr = PetscMalloc1(ncon*(1 + numNonExclusivelyOwned), &vtxwgt);CHKERRQ(ierr);
2884   vtxwgt[0] = numExclusivelyOwned;
2885   if (ncon>1) vtxwgt[1] = 1;
2886   for (i=0; i<numNonExclusivelyOwned; i++) {
2887     vtxwgt[ncon*(i+1)] = 1;
2888     if (ncon>1) vtxwgt[ncon*(i+1)+1] = 0;
2889   }
2890 
2891   if (viewer) {
2892     ierr = PetscViewerASCIIPrintf(viewer, "Attempt rebalancing of shared points of depth %D on interface of mesh distribution.\n", entityDepth);CHKERRQ(ierr);
2893     ierr = PetscViewerASCIIPrintf(viewer, "Size of generated auxiliary graph: %D\n", cumSumVertices[size]);CHKERRQ(ierr);
2894   }
2895   if (parallel) {
2896     ierr = PetscMalloc1(4, &options);CHKERRQ(ierr);
2897     options[0] = 1;
2898     options[1] = 0; /* Verbosity */
2899     options[2] = 0; /* Seed */
2900     options[3] = PARMETIS_PSR_COUPLED; /* Seed */
2901     if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using ParMETIS to partition graph.\n");CHKERRQ(ierr); }
2902     if (useInitialGuess) {
2903       if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using current distribution of points as initial guess.\n");CHKERRQ(ierr); }
2904       PetscStackPush("ParMETIS_V3_RefineKway");
2905       ierr = ParMETIS_V3_RefineKway((PetscInt*)cumSumVertices, xadj, adjncy, vtxwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgecut, part, &comm);
2906       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_RefineKway()");
2907       PetscStackPop;
2908     } else {
2909       PetscStackPush("ParMETIS_V3_PartKway");
2910       ierr = ParMETIS_V3_PartKway((PetscInt*)cumSumVertices, xadj, adjncy, vtxwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgecut, part, &comm);
2911       PetscStackPop;
2912       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
2913     }
2914     ierr = PetscFree(options);CHKERRQ(ierr);
2915   } else {
2916     if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using METIS to partition graph.\n");CHKERRQ(ierr); }
2917     Mat As;
2918     PetscInt numRows;
2919     PetscInt *partGlobal;
2920     ierr = MatCreateRedundantMatrix(A, size, MPI_COMM_NULL, MAT_INITIAL_MATRIX, &As);CHKERRQ(ierr);
2921 
2922     PetscInt *numExclusivelyOwnedAll;
2923     ierr = PetscMalloc1(size, &numExclusivelyOwnedAll);CHKERRQ(ierr);
2924     numExclusivelyOwnedAll[rank] = numExclusivelyOwned;
2925     ierr = MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,numExclusivelyOwnedAll,1,MPIU_INT,comm);CHKERRQ(ierr);
2926 
2927     ierr = MatGetSize(As, &numRows, NULL);CHKERRQ(ierr);
2928     ierr = PetscMalloc1(numRows, &partGlobal);CHKERRQ(ierr);
2929     if (!rank) {
2930       PetscInt *adjncy_g, *xadj_g, *vtxwgt_g;
2931       lenadjncy = 0;
2932 
2933       for (i=0; i<numRows; i++) {
2934         PetscInt temp=0;
2935         ierr = MatGetRow(As, i, &temp, NULL, NULL);CHKERRQ(ierr);
2936         lenadjncy += temp;
2937         ierr = MatRestoreRow(As, i, &temp, NULL, NULL);CHKERRQ(ierr);
2938       }
2939       ierr = PetscMalloc1(lenadjncy, &adjncy_g);CHKERRQ(ierr);
2940       lenxadj = 1 + numRows;
2941       ierr = PetscMalloc1(lenxadj, &xadj_g);CHKERRQ(ierr);
2942       xadj_g[0] = 0;
2943       counter = 0;
2944       for (i=0; i<numRows; i++) {
2945         PetscInt        temp=0;
2946         const PetscInt *cols;
2947         ierr = MatGetRow(As, i, &temp, &cols, NULL);CHKERRQ(ierr);
2948         ierr = PetscMemcpy(&adjncy_g[counter], cols, temp*sizeof(PetscInt));CHKERRQ(ierr);
2949         counter += temp;
2950         xadj_g[i+1] = counter;
2951         ierr = MatRestoreRow(As, i, &temp, &cols, NULL);CHKERRQ(ierr);
2952       }
2953       ierr = PetscMalloc1(2*numRows, &vtxwgt_g);CHKERRQ(ierr);
2954       for (i=0; i<size; i++){
2955         vtxwgt_g[ncon*cumSumVertices[i]] = numExclusivelyOwnedAll[i];
2956         if (ncon>1) vtxwgt_g[ncon*cumSumVertices[i]+1] = 1;
2957         for (j=cumSumVertices[i]+1; j<cumSumVertices[i+1]; j++) {
2958           vtxwgt_g[ncon*j] = 1;
2959           if (ncon>1) vtxwgt_g[2*j+1] = 0;
2960         }
2961       }
2962       ierr = PetscMalloc1(64, &options);CHKERRQ(ierr);
2963       ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
2964       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
2965       options[METIS_OPTION_CONTIG] = 1;
2966       PetscStackPush("METIS_PartGraphKway");
2967       ierr = METIS_PartGraphKway(&numRows, &ncon, xadj_g, adjncy_g, vtxwgt_g, NULL, NULL, &nparts, tpwgts, ubvec, options, &edgecut, partGlobal);
2968       PetscStackPop;
2969       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
2970       ierr = PetscFree(options);CHKERRQ(ierr);
2971       ierr = PetscFree(xadj_g);CHKERRQ(ierr);
2972       ierr = PetscFree(adjncy_g);CHKERRQ(ierr);
2973       ierr = PetscFree(vtxwgt_g);CHKERRQ(ierr);
2974     }
2975     ierr = PetscFree(numExclusivelyOwnedAll);CHKERRQ(ierr);
2976 
2977     /* Now scatter the parts array. */
2978     {
2979       PetscMPIInt *counts, *mpiCumSumVertices;
2980       ierr = PetscMalloc1(size, &counts);CHKERRQ(ierr);
2981       ierr = PetscMalloc1(size+1, &mpiCumSumVertices);CHKERRQ(ierr);
2982       for(i=0; i<size; i++) {
2983         ierr = PetscMPIIntCast(cumSumVertices[i+1] - cumSumVertices[i], &(counts[i]));CHKERRQ(ierr);
2984       }
2985       for(i=0; i<=size; i++) {
2986         ierr = PetscMPIIntCast(cumSumVertices[i], &(mpiCumSumVertices[i]));CHKERRQ(ierr);
2987       }
2988       ierr = MPI_Scatterv(partGlobal, counts, mpiCumSumVertices, MPIU_INT, part, counts[rank], MPIU_INT, 0, comm);CHKERRQ(ierr);
2989       ierr = PetscFree(counts);CHKERRQ(ierr);
2990       ierr = PetscFree(mpiCumSumVertices);CHKERRQ(ierr);
2991     }
2992 
2993     ierr = PetscFree(partGlobal);CHKERRQ(ierr);
2994     ierr = MatDestroy(&As);CHKERRQ(ierr);
2995   }
2996 
2997   ierr = MatDestroy(&A);CHKERRQ(ierr);
2998   ierr = PetscFree(ubvec);CHKERRQ(ierr);
2999   ierr = PetscFree(tpwgts);CHKERRQ(ierr);
3000 
3001   /* Now rename the result so that the vertex resembling the exclusively owned points stays on the same rank */
3002 
3003   ierr = PetscMalloc1(size, &firstVertices);CHKERRQ(ierr);
3004   ierr = PetscMalloc1(size, &renumbering);CHKERRQ(ierr);
3005   firstVertices[rank] = part[0];
3006   ierr = MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,firstVertices,1,MPIU_INT,comm);CHKERRQ(ierr);
3007   for (i=0; i<size; i++) {
3008     renumbering[firstVertices[i]] = i;
3009   }
3010   for (i=0; i<cumSumVertices[rank+1]-cumSumVertices[rank]; i++) {
3011     part[i] = renumbering[part[i]];
3012   }
3013   /* Check if the renumbering worked (this can fail when ParMETIS gives fewer partitions than there are processes) */
3014   failed = (PetscInt)(part[0] != rank);
3015   ierr = MPI_Allreduce(&failed, &failedGlobal, 1, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
3016 
3017   ierr = PetscFree(firstVertices);CHKERRQ(ierr);
3018   ierr = PetscFree(renumbering);CHKERRQ(ierr);
3019 
3020   if (failedGlobal > 0) {
3021     ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
3022     ierr = PetscFree(xadj);CHKERRQ(ierr);
3023     ierr = PetscFree(adjncy);CHKERRQ(ierr);
3024     ierr = PetscFree(vtxwgt);CHKERRQ(ierr);
3025     ierr = PetscFree(toBalance);CHKERRQ(ierr);
3026     ierr = PetscFree(isLeaf);CHKERRQ(ierr);
3027     ierr = PetscFree(isNonExclusivelyOwned);CHKERRQ(ierr);
3028     ierr = PetscFree(isExclusivelyOwned);CHKERRQ(ierr);
3029     ierr = PetscFree(part);CHKERRQ(ierr);
3030     if (viewer) {
3031       ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
3032       ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
3033     }
3034     ierr = PetscLogEventEnd(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
3035     PetscFunctionReturn(0);
3036   }
3037 
3038   /*Let's check how well we did distributing points*/
3039   if (viewer) {
3040     ierr = PetscViewerASCIIPrintf(viewer, "Comparing number of owned entities of depth %D on each process before rebalancing, after rebalancing, and after consistency checks.\n", entityDepth);CHKERRQ(ierr);
3041     ierr = PetscViewerASCIIPrintf(viewer, "Initial.     ");CHKERRQ(ierr);
3042     ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, NULL, viewer);CHKERRQ(ierr);
3043     ierr = PetscViewerASCIIPrintf(viewer, "Rebalanced.  ");CHKERRQ(ierr);
3044     ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, part, viewer);CHKERRQ(ierr);
3045   }
3046 
3047   /* Now check that every vertex is owned by a process that it is actually connected to. */
3048   for (i=1; i<=numNonExclusivelyOwned; i++) {
3049     PetscInt loc = 0;
3050     ierr = PetscFindInt(cumSumVertices[part[i]], xadj[i+1]-xadj[i], &adjncy[xadj[i]], &loc);CHKERRQ(ierr);
3051     /* If not, then just set the owner to the original owner (hopefully a rare event, it means that a vertex has been isolated) */
3052     if (loc<0) {
3053       part[i] = rank;
3054     }
3055   }
3056 
3057   /* Let's see how significant the influences of the previous fixing up step was.*/
3058   if (viewer) {
3059     ierr = PetscViewerASCIIPrintf(viewer, "After.       ");CHKERRQ(ierr);
3060     ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, part, viewer);CHKERRQ(ierr);
3061   }
3062 
3063   ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
3064   ierr = PetscFree(xadj);CHKERRQ(ierr);
3065   ierr = PetscFree(adjncy);CHKERRQ(ierr);
3066   ierr = PetscFree(vtxwgt);CHKERRQ(ierr);
3067 
3068   /* Almost done, now rewrite the SF to reflect the new ownership. */
3069   {
3070     PetscInt *pointsToRewrite;
3071     ierr = PetscMalloc1(numNonExclusivelyOwned, &pointsToRewrite);CHKERRQ(ierr);
3072     counter = 0;
3073     for(i=0; i<pEnd-pStart; i++) {
3074       if (toBalance[i]) {
3075         if (isNonExclusivelyOwned[i]) {
3076           pointsToRewrite[counter] = i + pStart;
3077           counter++;
3078         }
3079       }
3080     }
3081     ierr = DMPlexRewriteSF(dm, numNonExclusivelyOwned, pointsToRewrite, part+1, degrees);CHKERRQ(ierr);
3082     ierr = PetscFree(pointsToRewrite);CHKERRQ(ierr);
3083   }
3084 
3085   ierr = PetscFree(toBalance);CHKERRQ(ierr);
3086   ierr = PetscFree(isLeaf);CHKERRQ(ierr);
3087   ierr = PetscFree(isNonExclusivelyOwned);CHKERRQ(ierr);
3088   ierr = PetscFree(isExclusivelyOwned);CHKERRQ(ierr);
3089   ierr = PetscFree(part);CHKERRQ(ierr);
3090   if (viewer) {
3091     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
3092     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
3093   }
3094   if (success) *success = PETSC_TRUE;
3095   ierr = PetscLogEventEnd(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
3096 #else
3097   SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
3098 #endif
3099   PetscFunctionReturn(0);
3100 }
3101