xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 6a0bd7522af7eb76a79ca5f1af77fc9a30e8cdb2)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/hashseti.h>
3 
/* Class id for the PetscPartitioner object class (assigned at class registration) */
PetscClassId PETSCPARTITIONER_CLASSID = 0;

/* Registry of PetscPartitioner implementations, populated by PetscPartitionerRegister();
   the flag records whether the built-in implementations have been registered yet */
PetscFunctionList PetscPartitionerList              = NULL;
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;

/* One-shot flag for the Chaco citation below — presumably passed to PetscCitationsRegister(); confirm at call sites */
PetscBool ChacoPartitionercite = PETSC_FALSE;
/* BibTeX entry for the Chaco partitioner. Every field line ends in "\n" so the
   emitted citation is one field per line; the booktitle line was previously
   missing its terminating "\n", which fused it with the isbn field. */
const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
                               "  author    = {Bruce Hendrickson and Robert Leland},\n"
                               "  title     = {A multilevel algorithm for partitioning graphs},\n"
                               "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},\n"
                               "  isbn      = {0-89791-816-9},\n"
                               "  pages     = {28},\n"
                               "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
                               "  publisher = {ACM Press},\n"
                               "  address   = {New York},\n"
                               "  year      = {1995}\n}\n";
20 
/* One-shot flag and BibTeX entry for the ParMETIS partitioner citation */
PetscBool ParMetisPartitionercite = PETSC_FALSE;
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";
29 
30 PETSC_STATIC_INLINE PetscInt DMPlex_GlobalID(PetscInt point) { return point >= 0 ? point : -(point+1); }
31 
/* Build the cell-cell adjacency graph in CSR form by walking the mesh directly:
   local adjacencies come from DMPlexGetAdjacency() with FVM (cone, no closure)
   adjacency, and off-process neighbor cells are stitched in via the point SF.
   Output meaning matches DMPlexCreatePartitionerGraph(). */
static PetscErrorCode DMPlexCreatePartitionerGraph_Native(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
{
  PetscInt       dim, depth, p, pStart, pEnd, a, adjSize, idx, size;
  PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
  IS             cellNumbering;
  const PetscInt *cellNum;
  PetscBool      useCone, useClosure;
  PetscSection   section;
  PetscSegBuffer adjBuffer;
  PetscSF        sfPoint;
  PetscInt       *adjCells = NULL, *remoteCells = NULL;
  const PetscInt *local;
  PetscInt       nroots, nleaves, l;
  PetscMPIInt    rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  if (dim != depth) {
    /* We do not handle the uninterpolated case here */
    ierr = DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);CHKERRQ(ierr);
    /* DMPlexCreateNeighborCSR does not make a numbering */
    if (globalNumbering) {ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);CHKERRQ(ierr);}
    /* Different behavior for empty graphs */
    if (!*numVertices) {
      ierr = PetscMalloc1(1, offsets);CHKERRQ(ierr);
      (*offsets)[0] = 0;
    }
    /* Broken in parallel */
    if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported");
    PetscFunctionReturn(0);
  }
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
  /* Build adjacency graph via a section/segbuffer: the section counts edges per
     vertex, the segbuffer accumulates the edge targets in order */
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
  /* Always use FVM adjacency to create partitioner graph; the user's adjacency
     setting is saved and restored below */
  ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
  ierr = DMSetBasicAdjacency(dm, PETSC_TRUE, PETSC_FALSE);CHKERRQ(ierr);
  ierr = DMPlexCreateNumbering_Internal(dm, pStart, pEnd, 0, NULL, sfPoint, &cellNumbering);CHKERRQ(ierr);
  if (globalNumbering) {
    /* Hand the caller an extra reference to the cell numbering */
    ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
    *globalNumbering = cellNumbering;
  }
  ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  /* For all boundary faces (including faces adjacent to a ghost cell), record the local cell in adjCells
     Broadcast adjCells to remoteCells (to get cells from roots) and Reduce adjCells to remoteCells (to get cells from leaves)
   */
  ierr = PetscSFGetGraph(sfPoint, &nroots, &nleaves, &local, NULL);CHKERRQ(ierr);
  if (nroots >= 0) {
    PetscInt fStart, fEnd, f;

    ierr = PetscCalloc2(nroots, &adjCells, nroots, &remoteCells);CHKERRQ(ierr);
    ierr = DMPlexGetHeightStratum(dm, height+1, &fStart, &fEnd);CHKERRQ(ierr);
    /* -3 is a sentinel meaning "no adjacent cell recorded for this point" */
    for (l = 0; l < nroots; ++l) adjCells[l] = -3;
    for (f = fStart; f < fEnd; ++f) {
      const PetscInt *support;
      PetscInt        supportSize;

      ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
      ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
      /* Face with a single cell: record that cell's global ID on the face */
      if (supportSize == 1) adjCells[f] = DMPlex_GlobalID(cellNum[support[0]]);
      else if (supportSize == 2) {
        /* If one support cell is an SF leaf (owned remotely), record the OTHER
           cell on the face so the communication below exposes it across ranks */
        ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = DMPlex_GlobalID(cellNum[support[1]]);
        ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = DMPlex_GlobalID(cellNum[support[0]]);
      }
      /* Handle non-conforming meshes: repeat the recording for any face children */
      if (supportSize > 2) {
        PetscInt        numChildren, i;
        const PetscInt *children;

        ierr = DMPlexGetTreeChildren(dm, f, &numChildren, &children);CHKERRQ(ierr);
        for (i = 0; i < numChildren; ++i) {
          const PetscInt child = children[i];
          if (fStart <= child && child < fEnd) {
            ierr = DMPlexGetSupport(dm, child, &support);CHKERRQ(ierr);
            ierr = DMPlexGetSupportSize(dm, child, &supportSize);CHKERRQ(ierr);
            if (supportSize == 1) adjCells[child] = DMPlex_GlobalID(cellNum[support[0]]);
            else if (supportSize == 2) {
              ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr);
              if (p >= 0) adjCells[child] = DMPlex_GlobalID(cellNum[support[1]]);
              ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr);
              if (p >= 0) adjCells[child] = DMPlex_GlobalID(cellNum[support[0]]);
            }
          }
        }
      }
    }
    /* -1 marks "no remote cell"; MPI_MAX in the reduce keeps the recorded IDs */
    for (l = 0; l < nroots; ++l) remoteCells[l] = -1;
    /* NOTE(review): uses dm->sf directly; presumably the same SF as sfPoint obtained above — confirm */
    ierr = PetscSFBcastBegin(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
  }
  /* Combine local and global adjacencies */
  for (*numVertices = 0, p = pStart; p < pEnd; p++) {
    /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    /* Add remote cells: any face (or face child) of p carrying a remote cell ID
       different from p's own global ID contributes an edge */
    if (remoteCells) {
      const PetscInt gp = DMPlex_GlobalID(cellNum[p]);
      PetscInt       coneSize, numChildren, c, i;
      const PetscInt *cone, *children;

      ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
      ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
      for (c = 0; c < coneSize; ++c) {
        const PetscInt point = cone[c];
        if (remoteCells[point] >= 0 && remoteCells[point] != gp) {
          PetscInt *PETSC_RESTRICT pBuf;
          ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
          ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
          *pBuf = remoteCells[point];
        }
        /* Handle non-conforming meshes */
        ierr = DMPlexGetTreeChildren(dm, point, &numChildren, &children);CHKERRQ(ierr);
        for (i = 0; i < numChildren; ++i) {
          const PetscInt child = children[i];
          if (remoteCells[child] >= 0 && remoteCells[child] != gp) {
            PetscInt *PETSC_RESTRICT pBuf;
            ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
            ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
            *pBuf = remoteCells[child];
          }
        }
      }
    }
    /* Add local cells from the (FVM) adjacency of p, excluding p itself */
    adjSize = PETSC_DETERMINE;
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      const PetscInt point = adj[a];
      if (point != p && pStart <= point && point < pEnd) {
        PetscInt *PETSC_RESTRICT pBuf;
        ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
        ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
        *pBuf = DMPlex_GlobalID(cellNum[point]);
      }
    }
    (*numVertices)++;
  }
  ierr = PetscFree(adj);CHKERRQ(ierr);
  ierr = PetscFree2(adjCells, remoteCells);CHKERRQ(ierr);
  /* Restore the user's adjacency setting saved above */
  ierr = DMSetBasicAdjacency(dm, useCone, useClosure);CHKERRQ(ierr);

  /* Derive CSR graph from section/segbuffer */
  ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
  for (idx = 0, p = pStart; p < pEnd; p++) {
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
  }
  vOffsets[*numVertices] = size;
  ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);

  if (nroots >= 0) {
    /* Filter out duplicate edges using section/segbuffer: sort+dedup each row,
       re-count into the (reset) section, then rebuild offsets and graph */
    ierr = PetscSectionReset(section);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(section, 0, *numVertices);CHKERRQ(ierr);
    for (p = 0; p < *numVertices; p++) {
      PetscInt start = vOffsets[p], end = vOffsets[p+1];
      PetscInt numEdges = end-start, *PETSC_RESTRICT edges;
      ierr = PetscSortRemoveDupsInt(&numEdges, &graph[start]);CHKERRQ(ierr);
      ierr = PetscSectionSetDof(section, p, numEdges);CHKERRQ(ierr);
      ierr = PetscSegBufferGetInts(adjBuffer, numEdges, &edges);CHKERRQ(ierr);
      ierr = PetscMemcpy(edges, &graph[start], numEdges*sizeof(*edges));CHKERRQ(ierr);
    }
    ierr = PetscFree(vOffsets);CHKERRQ(ierr);
    ierr = PetscFree(graph);CHKERRQ(ierr);
    /* Derive CSR graph from section/segbuffer */
    ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
    ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
    ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
    for (idx = 0, p = 0; p < *numVertices; p++) {
      ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
    }
    vOffsets[*numVertices] = size;
    ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
  } else {
    /* Sort adjacencies (not strictly necessary) */
    for (p = 0; p < *numVertices; p++) {
      PetscInt start = vOffsets[p], end = vOffsets[p+1];
      ierr = PetscSortInt(end-start, &graph[start]);CHKERRQ(ierr);
    }
  }

  /* Ownership of vOffsets and graph passes to the caller (freed with PetscFree) */
  if (offsets) *offsets = vOffsets;
  if (adjacency) *adjacency = graph;

  /* Cleanup */
  ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
  ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
235 
/* Build the cell-cell adjacency graph by assembling a parallel facet-cell
   incidence matrix conn (rows = facets, cols = cells) and forming
   CSR = conn^T * conn; the off-diagonal sparsity pattern of CSR is exactly the
   face-neighbor relation between cells.  Output meaning matches
   DMPlexCreatePartitionerGraph(). */
static PetscErrorCode DMPlexCreatePartitionerGraph_ViaMat(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
{
  Mat            conn, CSR;
  IS             fis, cis, cis_own;
  PetscSF        sfPoint;
  const PetscInt *rows, *cols, *ii, *jj;
  PetscInt       *idxs,*idxs2;
  PetscInt       dim, depth, floc, cloc, i, M, N, c, m, cStart, cEnd, fStart, fEnd;
  PetscMPIInt    rank;
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  if (dim != depth) {
    /* We do not handle the uninterpolated case here */
    ierr = DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);CHKERRQ(ierr);
    /* DMPlexCreateNeighborCSR does not make a numbering */
    if (globalNumbering) {ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);CHKERRQ(ierr);}
    /* Different behavior for empty graphs */
    if (!*numVertices) {
      ierr = PetscMalloc1(1, offsets);CHKERRQ(ierr);
      (*offsets)[0] = 0;
    }
    /* Broken in parallel */
    if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported");
    PetscFunctionReturn(0);
  }
  /* Interpolated and parallel case */

  /* numbering: global numberings for cells (cis) and facets (fis); negative
     entries encode non-owned (ghost) points as -(id+1) */
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &cStart, &cEnd);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height+1, &fStart, &fEnd);CHKERRQ(ierr);
  ierr = DMPlexCreateNumbering_Internal(dm, cStart, cEnd, 0, &N, sfPoint, &cis);CHKERRQ(ierr);
  ierr = DMPlexCreateNumbering_Internal(dm, fStart, fEnd, 0, &M, sfPoint, &fis);CHKERRQ(ierr);
  if (globalNumbering) {
    ierr = ISDuplicate(cis, globalNumbering);CHKERRQ(ierr);
  }

  /* get positive global ids and local sizes for facets and cells */
  ierr = ISGetLocalSize(fis, &m);CHKERRQ(ierr);
  ierr = ISGetIndices(fis, &rows);CHKERRQ(ierr);
  ierr = PetscMalloc1(m, &idxs);CHKERRQ(ierr);
  for (i = 0, floc = 0; i < m; i++) {
    const PetscInt p = rows[i];

    if (p < 0) {
      /* ghost facet: decode -(id+1) back to the positive global id */
      idxs[i] = -(p+1);
    } else {
      idxs[i] = p;
      floc   += 1; /* count locally owned facets */
    }
  }
  ierr = ISRestoreIndices(fis, &rows);CHKERRQ(ierr);
  ierr = ISDestroy(&fis);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF, m, idxs, PETSC_OWN_POINTER, &fis);CHKERRQ(ierr);

  ierr = ISGetLocalSize(cis, &m);CHKERRQ(ierr);
  ierr = ISGetIndices(cis, &cols);CHKERRQ(ierr);
  ierr = PetscMalloc1(m, &idxs);CHKERRQ(ierr);
  ierr = PetscMalloc1(m, &idxs2);CHKERRQ(ierr);
  for (i = 0, cloc = 0; i < m; i++) {
    const PetscInt p = cols[i];

    if (p < 0) {
      idxs[i] = -(p+1);
    } else {
      idxs[i]       = p;
      idxs2[cloc++] = p; /* idxs2 keeps only the owned-cell global ids */
    }
  }
  ierr = ISRestoreIndices(cis, &cols);CHKERRQ(ierr);
  ierr = ISDestroy(&cis);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), m, idxs, PETSC_OWN_POINTER, &cis);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), cloc, idxs2, PETSC_OWN_POINTER, &cis_own);CHKERRQ(ierr);

  /* Create matrix to hold F-C connectivity (MatMatTranspose Mult not supported for MPIAIJ) */
  ierr = MatCreate(PetscObjectComm((PetscObject)dm), &conn);CHKERRQ(ierr);
  ierr = MatSetSizes(conn, floc, cloc, M, N);CHKERRQ(ierr);
  ierr = MatSetType(conn, MATMPIAIJ);CHKERRQ(ierr);
  ierr = DMPlexGetMaxSizes(dm, NULL, &m);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(conn, m, NULL, m, NULL);CHKERRQ(ierr);

  /* Assemble matrix: one entry per (facet, cell) incidence, including facet
     children for non-conforming meshes */
  ierr = ISGetIndices(fis, &rows);CHKERRQ(ierr);
  ierr = ISGetIndices(cis, &cols);CHKERRQ(ierr);
  for (c = cStart; c < cEnd; c++) {
    const PetscInt *cone;
    PetscInt        coneSize, row, col, f;

    col  = cols[c-cStart];
    ierr = DMPlexGetCone(dm, c, &cone);CHKERRQ(ierr);
    ierr = DMPlexGetConeSize(dm, c, &coneSize);CHKERRQ(ierr);
    for (f = 0; f < coneSize; f++) {
      const PetscScalar v = 1.0;
      const PetscInt *children;
      PetscInt        numChildren, ch;

      row  = rows[cone[f]-fStart];
      ierr = MatSetValues(conn, 1, &row, 1, &col, &v, INSERT_VALUES);CHKERRQ(ierr);

      /* non-conforming meshes */
      ierr = DMPlexGetTreeChildren(dm, cone[f], &numChildren, &children);CHKERRQ(ierr);
      for (ch = 0; ch < numChildren; ch++) {
        const PetscInt child = children[ch];

        if (child < fStart || child >= fEnd) continue;
        row  = rows[child-fStart];
        ierr = MatSetValues(conn, 1, &row, 1, &col, &v, INSERT_VALUES);CHKERRQ(ierr);
      }
    }
  }
  ierr = ISRestoreIndices(fis, &rows);CHKERRQ(ierr);
  ierr = ISRestoreIndices(cis, &cols);CHKERRQ(ierr);
  ierr = ISDestroy(&fis);CHKERRQ(ierr);
  ierr = ISDestroy(&cis);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(conn, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(conn, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  /* Get parallel CSR by doing conn^T * conn */
  ierr = MatTransposeMatMult(conn, conn, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &CSR);CHKERRQ(ierr);
  ierr = MatDestroy(&conn);CHKERRQ(ierr);

  /* extract local part of the CSR */
  ierr = MatMPIAIJGetLocalMat(CSR, MAT_INITIAL_MATRIX, &conn);CHKERRQ(ierr);
  ierr = MatDestroy(&CSR);CHKERRQ(ierr);
  ierr = MatGetRowIJ(conn, 0, PETSC_FALSE, PETSC_FALSE, &m, &ii, &jj, &flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "No IJ format");

  /* get back requested output; each row contains exactly one diagonal
     (self-connectivity) entry, which is subtracted out below */
  if (numVertices) *numVertices = m;
  if (offsets) {
    ierr = PetscCalloc1(m+1, &idxs);CHKERRQ(ierr);
    for (i = 1; i < m+1; i++) idxs[i] = ii[i] - i; /* ParMetis does not like self-connectivity */
    *offsets = idxs;
  }
  if (adjacency) {
    ierr = PetscMalloc1(ii[m] - m, &idxs);CHKERRQ(ierr);
    ierr = ISGetIndices(cis_own, &rows);CHKERRQ(ierr);
    for (i = 0, c = 0; i < m; i++) {
      PetscInt j, g = rows[i];

      for (j = ii[i]; j < ii[i+1]; j++) {
        if (jj[j] == g) continue; /* again, self-connectivity */
        idxs[c++] = jj[j];
      }
    }
    if (c != ii[m] - m) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected %D != %D",c,ii[m]-m);
    ierr = ISRestoreIndices(cis_own, &rows);CHKERRQ(ierr);
    *adjacency = idxs;
  }

  /* cleanup */
  ierr = ISDestroy(&cis_own);CHKERRQ(ierr);
  ierr = MatRestoreRowIJ(conn, 0, PETSC_FALSE, PETSC_FALSE, &m, &ii, &jj, &flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "No IJ format");
  ierr = MatDestroy(&conn);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
398 
399 /*@C
400   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
401 
402   Input Parameters:
+ dm      - The mesh DM
404 - height  - Height of the strata from which to construct the graph
405 
  Output Parameters:
407 + numVertices     - Number of vertices in the graph
408 . offsets         - Point offsets in the graph
409 . adjacency       - Point connectivity in the graph
410 - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.
411 
412   The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
413   representation on the mesh. If requested, globalNumbering needs to be destroyed by the caller; offsets and adjacency need to be freed with PetscFree().
414 
415   Level: developer
416 
417 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMSetAdjacency()
418 @*/
419 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
420 {
421   PetscErrorCode ierr;
422   PetscBool      usemat = PETSC_FALSE;
423 
424   PetscFunctionBegin;
425   ierr = PetscOptionsGetBool(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-dm_plex_csr_via_mat", &usemat, NULL);CHKERRQ(ierr);
426   if (usemat) {
427     ierr = DMPlexCreatePartitionerGraph_ViaMat(dm, height, numVertices, offsets, adjacency, globalNumbering);CHKERRQ(ierr);
428   } else {
429     ierr = DMPlexCreatePartitionerGraph_Native(dm, height, numVertices, offsets, adjacency, globalNumbering);CHKERRQ(ierr);
430   }
431   PetscFunctionReturn(0);
432 }
433 
434 /*@C
435   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
436 
437   Collective
438 
439   Input Arguments:
440 + dm - The DMPlex
441 - cellHeight - The height of mesh points to treat as cells (default should be 0)
442 
443   Output Arguments:
444 + numVertices - The number of local vertices in the graph, or cells in the mesh.
445 . offsets     - The offset to the adjacency list for each cell
446 - adjacency   - The adjacency list for all cells
447 
448   Note: This is suitable for input to a mesh partitioner like ParMetis.
449 
450   Level: advanced
451 
452 .seealso: DMPlexCreate()
453 @*/
454 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
455 {
456   const PetscInt maxFaceCases = 30;
457   PetscInt       numFaceCases = 0;
458   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
459   PetscInt      *off, *adj;
460   PetscInt      *neighborCells = NULL;
461   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
462   PetscErrorCode ierr;
463 
464   PetscFunctionBegin;
465   /* For parallel partitioning, I think you have to communicate supports */
466   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
467   cellDim = dim - cellHeight;
468   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
469   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
470   if (cEnd - cStart == 0) {
471     if (numVertices) *numVertices = 0;
472     if (offsets)   *offsets   = NULL;
473     if (adjacency) *adjacency = NULL;
474     PetscFunctionReturn(0);
475   }
476   numCells  = cEnd - cStart;
477   faceDepth = depth - cellHeight;
478   if (dim == depth) {
479     PetscInt f, fStart, fEnd;
480 
481     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
482     /* Count neighboring cells */
483     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
484     for (f = fStart; f < fEnd; ++f) {
485       const PetscInt *support;
486       PetscInt        supportSize;
487       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
488       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
489       if (supportSize == 2) {
490         PetscInt numChildren;
491 
492         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
493         if (!numChildren) {
494           ++off[support[0]-cStart+1];
495           ++off[support[1]-cStart+1];
496         }
497       }
498     }
499     /* Prefix sum */
500     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
501     if (adjacency) {
502       PetscInt *tmp;
503 
504       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
505       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
506       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
507       /* Get neighboring cells */
508       for (f = fStart; f < fEnd; ++f) {
509         const PetscInt *support;
510         PetscInt        supportSize;
511         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
512         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
513         if (supportSize == 2) {
514           PetscInt numChildren;
515 
516           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
517           if (!numChildren) {
518             adj[tmp[support[0]-cStart]++] = support[1];
519             adj[tmp[support[1]-cStart]++] = support[0];
520           }
521         }
522       }
523 #if defined(PETSC_USE_DEBUG)
524       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
525 #endif
526       ierr = PetscFree(tmp);CHKERRQ(ierr);
527     }
528     if (numVertices) *numVertices = numCells;
529     if (offsets)   *offsets   = off;
530     if (adjacency) *adjacency = adj;
531     PetscFunctionReturn(0);
532   }
533   /* Setup face recognition */
534   if (faceDepth == 1) {
535     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
536 
537     for (c = cStart; c < cEnd; ++c) {
538       PetscInt corners;
539 
540       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
541       if (!cornersSeen[corners]) {
542         PetscInt nFV;
543 
544         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
545         cornersSeen[corners] = 1;
546 
547         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
548 
549         numFaceVertices[numFaceCases++] = nFV;
550       }
551     }
552   }
553   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
554   /* Count neighboring cells */
555   for (cell = cStart; cell < cEnd; ++cell) {
556     PetscInt numNeighbors = PETSC_DETERMINE, n;
557 
558     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
559     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
560     for (n = 0; n < numNeighbors; ++n) {
561       PetscInt        cellPair[2];
562       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
563       PetscInt        meetSize = 0;
564       const PetscInt *meet    = NULL;
565 
566       cellPair[0] = cell; cellPair[1] = neighborCells[n];
567       if (cellPair[0] == cellPair[1]) continue;
568       if (!found) {
569         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
570         if (meetSize) {
571           PetscInt f;
572 
573           for (f = 0; f < numFaceCases; ++f) {
574             if (numFaceVertices[f] == meetSize) {
575               found = PETSC_TRUE;
576               break;
577             }
578           }
579         }
580         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
581       }
582       if (found) ++off[cell-cStart+1];
583     }
584   }
585   /* Prefix sum */
586   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
587 
588   if (adjacency) {
589     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
590     /* Get neighboring cells */
591     for (cell = cStart; cell < cEnd; ++cell) {
592       PetscInt numNeighbors = PETSC_DETERMINE, n;
593       PetscInt cellOffset   = 0;
594 
595       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
596       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
597       for (n = 0; n < numNeighbors; ++n) {
598         PetscInt        cellPair[2];
599         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
600         PetscInt        meetSize = 0;
601         const PetscInt *meet    = NULL;
602 
603         cellPair[0] = cell; cellPair[1] = neighborCells[n];
604         if (cellPair[0] == cellPair[1]) continue;
605         if (!found) {
606           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
607           if (meetSize) {
608             PetscInt f;
609 
610             for (f = 0; f < numFaceCases; ++f) {
611               if (numFaceVertices[f] == meetSize) {
612                 found = PETSC_TRUE;
613                 break;
614               }
615             }
616           }
617           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
618         }
619         if (found) {
620           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
621           ++cellOffset;
622         }
623       }
624     }
625   }
626   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
627   if (numVertices) *numVertices = numCells;
628   if (offsets)   *offsets   = off;
629   if (adjacency) *adjacency = adj;
630   PetscFunctionReturn(0);
631 }
632 
633 /*@C
634   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
635 
636   Not Collective
637 
638   Input Parameters:
639 + name        - The name of a new user-defined creation routine
640 - create_func - The creation routine itself
641 
642   Notes:
643   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
644 
645   Sample usage:
646 .vb
647     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
648 .ve
649 
650   Then, your PetscPartitioner type can be chosen with the procedural interface via
651 .vb
652     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
653     PetscPartitionerSetType(PetscPartitioner, "my_part");
654 .ve
655    or at runtime via the option
656 .vb
657     -petscpartitioner_type my_part
658 .ve
659 
660   Level: advanced
661 
662 .keywords: PetscPartitioner, register
663 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
664 
665 @*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Record the creation routine under the given name in the global registry,
     so PetscPartitionerSetType() can look it up later */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
674 
675 /*@C
676   PetscPartitionerSetType - Builds a particular PetscPartitioner
677 
678   Collective on PetscPartitioner
679 
680   Input Parameters:
681 + part - The PetscPartitioner object
682 - name - The kind of partitioner
683 
684   Options Database Key:
685 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
686 
687   Level: intermediate
688 
689 .keywords: PetscPartitioner, set, type
690 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
691 @*/
692 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
693 {
694   PetscErrorCode (*r)(PetscPartitioner);
695   PetscBool      match;
696   PetscErrorCode ierr;
697 
698   PetscFunctionBegin;
699   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
700   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
701   if (match) PetscFunctionReturn(0);
702 
703   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
704   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
705   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
706 
707   if (part->ops->destroy) {
708     ierr = (*part->ops->destroy)(part);CHKERRQ(ierr);
709   }
710   part->noGraph = PETSC_FALSE;
711   ierr = PetscMemzero(part->ops, sizeof(struct _PetscPartitionerOps));CHKERRQ(ierr);
712   ierr = (*r)(part);CHKERRQ(ierr);
713   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
714   PetscFunctionReturn(0);
715 }
716 
/*@C
  PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.

  Not Collective

  Input Parameter:
. part - The PetscPartitioner

  Output Parameter:
. name - The PetscPartitioner type name (pointer to internal storage; do not free)

  Level: intermediate

.keywords: PetscPartitioner, get, type, name
.seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  /* Return the object's internal type string; caller must not modify or free it */
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
744 
745 /*@C
746   PetscPartitionerView - Views a PetscPartitioner
747 
748   Collective on PetscPartitioner
749 
750   Input Parameter:
751 + part - the PetscPartitioner object to view
752 - v    - the viewer
753 
754   Level: developer
755 
756 .seealso: PetscPartitionerDestroy()
757 @*/
758 PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
759 {
760   PetscMPIInt    size;
761   PetscBool      isascii;
762   PetscErrorCode ierr;
763 
764   PetscFunctionBegin;
765   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
766   if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
767   ierr = PetscObjectTypeCompare((PetscObject) v, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
768   if (isascii) {
769     ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
770     ierr = PetscViewerASCIIPrintf(v, "Graph Partitioner: %d MPI Process%s\n", size, size > 1 ? "es" : "");CHKERRQ(ierr);
771     ierr = PetscViewerASCIIPrintf(v, "  type: %s\n", part->hdr.type_name);CHKERRQ(ierr);
772     ierr = PetscViewerASCIIPushTab(v);CHKERRQ(ierr);
773     ierr = PetscViewerASCIIPrintf(v, "edge cut: %D\n", part->edgeCut);CHKERRQ(ierr);
774     ierr = PetscViewerASCIIPrintf(v, "balance:  %.2g\n", part->balance);CHKERRQ(ierr);
775     ierr = PetscViewerASCIIPopTab(v);CHKERRQ(ierr);
776   }
777   if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
778   PetscFunctionReturn(0);
779 }
780 
781 static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType)
782 {
783   PetscFunctionBegin;
784   if (!currentType) {
785 #if defined(PETSC_HAVE_PARMETIS)
786     *defaultType = PETSCPARTITIONERPARMETIS;
787 #elif defined(PETSC_HAVE_PTSCOTCH)
788     *defaultType = PETSCPARTITIONERPTSCOTCH;
789 #elif defined(PETSC_HAVE_CHACO)
790     *defaultType = PETSCPARTITIONERCHACO;
791 #else
792     *defaultType = PETSCPARTITIONERSIMPLE;
793 #endif
794   } else {
795     *defaultType = currentType;
796   }
797   PetscFunctionReturn(0);
798 }
799 
800 /*@
801   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
802 
803   Collective on PetscPartitioner
804 
805   Input Parameter:
806 . part - the PetscPartitioner object to set options for
807 
808   Level: developer
809 
810 .seealso: PetscPartitionerView()
811 @*/
812 PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
813 {
814   const char    *defaultType;
815   char           name[256];
816   PetscBool      flg;
817   PetscErrorCode ierr;
818 
819   PetscFunctionBegin;
820   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
821   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
822   ierr = PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);CHKERRQ(ierr);
823   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
824   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, sizeof(name), &flg);CHKERRQ(ierr);
825   if (flg) {
826     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
827   } else if (!((PetscObject) part)->type_name) {
828     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
829   }
830   if (part->ops->setfromoptions) {
831     ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);
832   }
833   ierr = PetscViewerDestroy(&part->viewerGraph);CHKERRQ(ierr);
834   ierr = PetscOptionsGetViewer(((PetscObject) part)->comm, ((PetscObject) part)->options, ((PetscObject) part)->prefix, "-petscpartitioner_view_graph", &part->viewerGraph, &part->formatGraph, &part->viewGraph);CHKERRQ(ierr);
835   /* process any options handlers added with PetscObjectAddOptionsHandler() */
836   ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
837   ierr = PetscOptionsEnd();CHKERRQ(ierr);
838   PetscFunctionReturn(0);
839 }
840 
/*@C
  PetscPartitionerSetUp - Construct data structures for the PetscPartitioner

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to setup

  Level: developer

.seealso: PetscPartitionerView(), PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Delegate to the type-specific setup, if the implementation provides one */
  if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
862 
863 /*@
864   PetscPartitionerDestroy - Destroys a PetscPartitioner object
865 
866   Collective on PetscPartitioner
867 
868   Input Parameter:
869 . part - the PetscPartitioner object to destroy
870 
871   Level: developer
872 
873 .seealso: PetscPartitionerView()
874 @*/
875 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
876 {
877   PetscErrorCode ierr;
878 
879   PetscFunctionBegin;
880   if (!*part) PetscFunctionReturn(0);
881   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
882 
883   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
884   ((PetscObject) (*part))->refct = 0;
885 
886   ierr = PetscViewerDestroy(&(*part)->viewerGraph);CHKERRQ(ierr);
887   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
888   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
889   PetscFunctionReturn(0);
890 }
891 
892 /*@
893   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
894 
895   Collective on MPI_Comm
896 
897   Input Parameter:
898 . comm - The communicator for the PetscPartitioner object
899 
900   Output Parameter:
901 . part - The PetscPartitioner object
902 
903   Level: beginner
904 
905 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
906 @*/
907 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
908 {
909   PetscPartitioner p;
910   const char       *partitionerType = NULL;
911   PetscErrorCode   ierr;
912 
913   PetscFunctionBegin;
914   PetscValidPointer(part, 2);
915   *part = NULL;
916   ierr = DMInitializePackage();CHKERRQ(ierr);
917 
918   ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
919   ierr = PetscPartitionerGetDefaultType(NULL,&partitionerType);CHKERRQ(ierr);
920   ierr = PetscPartitionerSetType(p,partitionerType);CHKERRQ(ierr);
921 
922   p->edgeCut = 0;
923   p->balance = 0.0;
924 
925   *part = p;
926   PetscFunctionReturn(0);
927 }
928 
929 /*@
930   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
931 
932   Collective on DM
933 
934   Input Parameters:
935 + part    - The PetscPartitioner
936 - dm      - The mesh DM
937 
938   Output Parameters:
939 + partSection     - The PetscSection giving the division of points by partition
940 - partition       - The list of points by partition
941 
942   Options Database:
943 . -petscpartitioner_view_graph - View the graph we are partitioning
944 
945   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
946 
947   Level: developer
948 
949 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
950 @*/
951 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
952 {
953   PetscMPIInt    size;
954   PetscErrorCode ierr;
955 
956   PetscFunctionBegin;
957   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
958   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
959   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
960   PetscValidPointer(partition, 5);
961   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
962   if (size == 1) {
963     PetscInt *points;
964     PetscInt  cStart, cEnd, c;
965 
966     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
967     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
968     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
969     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
970     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
971     for (c = cStart; c < cEnd; ++c) points[c] = c;
972     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
973     PetscFunctionReturn(0);
974   }
975   if (part->height == 0) {
976     PetscInt numVertices = 0;
977     PetscInt *start     = NULL;
978     PetscInt *adjacency = NULL;
979     IS       globalNumbering;
980 
981     if (!part->noGraph || part->viewGraph) {
982       ierr = DMPlexCreatePartitionerGraph(dm, part->height, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
983     } else { /* only compute the number of owned local vertices */
984       const PetscInt *idxs;
985       PetscInt       p, pStart, pEnd;
986 
987       ierr = DMPlexGetHeightStratum(dm, part->height, &pStart, &pEnd);CHKERRQ(ierr);
988       ierr = DMPlexCreateNumbering_Internal(dm, pStart, pEnd, 0, NULL, dm->sf, &globalNumbering);CHKERRQ(ierr);
989       ierr = ISGetIndices(globalNumbering, &idxs);CHKERRQ(ierr);
990       for (p = 0; p < pEnd - pStart; p++) numVertices += idxs[p] < 0 ? 0 : 1;
991       ierr = ISRestoreIndices(globalNumbering, &idxs);CHKERRQ(ierr);
992     }
993     if (part->viewGraph) {
994       PetscViewer viewer = part->viewerGraph;
995       PetscBool   isascii;
996       PetscInt    v, i;
997       PetscMPIInt rank;
998 
999       ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) viewer), &rank);CHKERRQ(ierr);
1000       ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
1001       if (isascii) {
1002         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
1003         ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]Nv: %D\n", rank, numVertices);CHKERRQ(ierr);
1004         for (v = 0; v < numVertices; ++v) {
1005           const PetscInt s = start[v];
1006           const PetscInt e = start[v+1];
1007 
1008           ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]  ", rank);CHKERRQ(ierr);
1009           for (i = s; i < e; ++i) {ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%D ", adjacency[i]);CHKERRQ(ierr);}
1010           ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%D-%D)\n", s, e);CHKERRQ(ierr);
1011         }
1012         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1013         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
1014       }
1015     }
1016     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no partitioning method");
1017     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
1018     ierr = PetscFree(start);CHKERRQ(ierr);
1019     ierr = PetscFree(adjacency);CHKERRQ(ierr);
1020     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
1021       const PetscInt *globalNum;
1022       const PetscInt *partIdx;
1023       PetscInt       *map, cStart, cEnd;
1024       PetscInt       *adjusted, i, localSize, offset;
1025       IS             newPartition;
1026 
1027       ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
1028       ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
1029       ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
1030       ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
1031       ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
1032       ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
1033       for (i = cStart, offset = 0; i < cEnd; i++) {
1034         if (globalNum[i - cStart] >= 0) map[offset++] = i;
1035       }
1036       for (i = 0; i < localSize; i++) {
1037         adjusted[i] = map[partIdx[i]];
1038       }
1039       ierr = PetscFree(map);CHKERRQ(ierr);
1040       ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
1041       ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
1042       ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
1043       ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
1044       ierr = ISDestroy(partition);CHKERRQ(ierr);
1045       *partition = newPartition;
1046     }
1047   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
1048   ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
1049   PetscFunctionReturn(0);
1050 }
1051 
1052 static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
1053 {
1054   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1055   PetscErrorCode          ierr;
1056 
1057   PetscFunctionBegin;
1058   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
1059   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
1060   ierr = PetscFree(p);CHKERRQ(ierr);
1061   PetscFunctionReturn(0);
1062 }
1063 
1064 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
1065 {
1066   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1067   PetscErrorCode          ierr;
1068 
1069   PetscFunctionBegin;
1070   if (p->random) {
1071     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1072     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
1073     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1074   }
1075   PetscFunctionReturn(0);
1076 }
1077 
1078 static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
1079 {
1080   PetscBool      iascii;
1081   PetscErrorCode ierr;
1082 
1083   PetscFunctionBegin;
1084   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1085   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1086   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1087   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
1088   PetscFunctionReturn(0);
1089 }
1090 
/* Process options specific to the Shell partitioner (currently only the random flag) */
static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner Shell Options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1102 
1103 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1104 {
1105   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1106   PetscInt                np;
1107   PetscErrorCode          ierr;
1108 
1109   PetscFunctionBegin;
1110   if (p->random) {
1111     PetscRandom r;
1112     PetscInt   *sizes, *points, v, p;
1113     PetscMPIInt rank;
1114 
1115     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1116     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
1117     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr);
1118     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
1119     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
1120     for (v = 0; v < numVertices; ++v) {points[v] = v;}
1121     for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);}
1122     for (v = numVertices-1; v > 0; --v) {
1123       PetscReal val;
1124       PetscInt  w, tmp;
1125 
1126       ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));CHKERRQ(ierr);
1127       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
1128       w    = PetscFloorReal(val);
1129       tmp       = points[v];
1130       points[v] = points[w];
1131       points[w] = tmp;
1132     }
1133     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
1134     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
1135     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
1136   }
1137   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
1138   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
1139   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
1140   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
1141   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
1142   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
1143   *partition = p->partition;
1144   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
1145   PetscFunctionReturn(0);
1146 }
1147 
/* Install the function table for the Shell implementation */
static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->noGraph             = PETSC_TRUE; /* PetscPartitionerShell cannot overload the partition call, so it is safe for now */
  part->ops->view           = PetscPartitionerView_Shell;
  part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
  part->ops->destroy        = PetscPartitionerDestroy_Shell;
  part->ops->partition      = PetscPartitionerPartition_Shell;
  PetscFunctionReturn(0);
}
1158 
1159 /*MC
1160   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
1161 
1162   Level: intermediate
1163 
1164 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1165 M*/
1166 
1167 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
1168 {
1169   PetscPartitioner_Shell *p;
1170   PetscErrorCode          ierr;
1171 
1172   PetscFunctionBegin;
1173   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1174   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1175   part->data = p;
1176 
1177   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
1178   p->random = PETSC_FALSE;
1179   PetscFunctionReturn(0);
1180 }
1181 
/*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh

  Collective on Part

  Input Parameters:
+ part   - The PetscPartitioner
. size   - The number of partitions
. sizes  - array of size size (or NULL) providing the number of points in each partition
- points - array of size sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)

  Level: developer

  Notes:

    It is safe to free the sizes and points arrays after use in this routine.

.seealso DMPlexDistribute(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscInt                proc, numPoints;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  if (sizes)  {PetscValidPointer(sizes, 3);}
  if (points) {PetscValidPointer(points, 4);}
  /* Discard any previously stored partition before installing the new one */
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
  if (sizes) {
    for (proc = 0; proc < size; ++proc) {
      ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
    }
  }
  ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
  /* Copy the point permutation; the caller retains ownership of its arrays */
  ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1225 
1226 /*@
1227   PetscPartitionerShellSetRandom - Set the flag to use a random partition
1228 
1229   Collective on Part
1230 
1231   Input Parameters:
1232 + part   - The PetscPartitioner
1233 - random - The flag to use a random partition
1234 
1235   Level: intermediate
1236 
1237 .seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
1238 @*/
1239 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
1240 {
1241   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1242 
1243   PetscFunctionBegin;
1244   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1245   p->random = random;
1246   PetscFunctionReturn(0);
1247 }
1248 
1249 /*@
1250   PetscPartitionerShellGetRandom - get the flag to use a random partition
1251 
1252   Collective on Part
1253 
1254   Input Parameter:
1255 . part   - The PetscPartitioner
1256 
1257   Output Parameter
1258 . random - The flag to use a random partition
1259 
1260   Level: intermediate
1261 
1262 .seealso PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
1263 @*/
1264 PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
1265 {
1266   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1267 
1268   PetscFunctionBegin;
1269   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1270   PetscValidPointer(random, 2);
1271   *random = p->random;
1272   PetscFunctionReturn(0);
1273 }
1274 
1275 static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
1276 {
1277   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
1278   PetscErrorCode          ierr;
1279 
1280   PetscFunctionBegin;
1281   ierr = PetscFree(p);CHKERRQ(ierr);
1282   PetscFunctionReturn(0);
1283 }
1284 
/* The Simple partitioner has no type-specific state to display */
static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1290 
1291 static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
1292 {
1293   PetscBool      iascii;
1294   PetscErrorCode ierr;
1295 
1296   PetscFunctionBegin;
1297   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1298   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1299   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1300   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
1301   PetscFunctionReturn(0);
1302 }
1303 
/* Split the global vertex numbering into nparts contiguous, nearly equal slices.
   Each rank keeps its local point order and records how many of its points fall
   into each slice. */
static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)part);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  /* Local points keep their order: the partition IS is the identity permutation */
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial: the first (numVertices % nparts) partitions get one extra point */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  } else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Prefix-sum the per-rank vertex counts to get each rank's global index range */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      PetscInt rem = nvGlobal % nparts;
      PetscInt pSmall = nvGlobal/nparts;
      PetscInt pBig = nvGlobal/nparts + 1;

      /* The first rem partitions have pBig points, the remaining ones pSmall.
         Locate the partitions containing this rank's first and last global vertex. */

      if (rem) {
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        } else {
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        } else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      } else {
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      /* Record, for each partition overlapping [myFirst,myLast], how many of this
         rank's points it receives (the overlap of the slice with the local range) */
      for (np = firstPart; np <= lastPart; np++) {
        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1373 
/* Install the function table for the Simple implementation */
static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->noGraph        = PETSC_TRUE; /* the Simple partitioner never reads the adjacency graph */
  part->ops->view      = PetscPartitionerView_Simple;
  part->ops->destroy   = PetscPartitionerDestroy_Simple;
  part->ops->partition = PetscPartitionerPartition_Simple;
  PetscFunctionReturn(0);
}
1383 
1384 /*MC
1385   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1386 
1387   Level: intermediate
1388 
1389 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1390 M*/
1391 
1392 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1393 {
1394   PetscPartitioner_Simple *p;
1395   PetscErrorCode           ierr;
1396 
1397   PetscFunctionBegin;
1398   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1399   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1400   part->data = p;
1401 
1402   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1403   PetscFunctionReturn(0);
1404 }
1405 
1406 static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1407 {
1408   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1409   PetscErrorCode          ierr;
1410 
1411   PetscFunctionBegin;
1412   ierr = PetscFree(p);CHKERRQ(ierr);
1413   PetscFunctionReturn(0);
1414 }
1415 
/* The Gather partitioner has no type-specific state to display */
static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1421 
1422 static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1423 {
1424   PetscBool      iascii;
1425   PetscErrorCode ierr;
1426 
1427   PetscFunctionBegin;
1428   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1429   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1430   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1431   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1432   PetscFunctionReturn(0);
1433 }
1434 
1435 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1436 {
1437   PetscInt       np;
1438   PetscErrorCode ierr;
1439 
1440   PetscFunctionBegin;
1441   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1442   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1443   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1444   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1445   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1446   PetscFunctionReturn(0);
1447 }
1448 
/* Install the function table for the Gather implementation */
static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->noGraph        = PETSC_TRUE; /* the Gather partitioner never reads the adjacency graph */
  part->ops->view      = PetscPartitionerView_Gather;
  part->ops->destroy   = PetscPartitionerDestroy_Gather;
  part->ops->partition = PetscPartitionerPartition_Gather;
  PetscFunctionReturn(0);
}
1458 
1459 /*MC
1460   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1461 
1462   Level: intermediate
1463 
1464 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1465 M*/
1466 
1467 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1468 {
1469   PetscPartitioner_Gather *p;
1470   PetscErrorCode           ierr;
1471 
1472   PetscFunctionBegin;
1473   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1474   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1475   part->data = p;
1476 
1477   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1478   PetscFunctionReturn(0);
1479 }
1480 
1481 
1482 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1483 {
1484   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1485   PetscErrorCode          ierr;
1486 
1487   PetscFunctionBegin;
1488   ierr = PetscFree(p);CHKERRQ(ierr);
1489   PetscFunctionReturn(0);
1490 }
1491 
/* ASCII viewer for the Chaco partitioner. Intentionally a no-op: the Chaco
   context carries no options worth printing. */
static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1497 
1498 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1499 {
1500   PetscBool      iascii;
1501   PetscErrorCode ierr;
1502 
1503   PetscFunctionBegin;
1504   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1505   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1506   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1507   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1508   PetscFunctionReturn(0);
1509 }
1510 
#if defined(PETSC_HAVE_CHACO)
#if defined(PETSC_HAVE_UNISTD_H)
#include <unistd.h> /* pipe()/dup()/dup2(): used to capture Chaco's stdout in PetscPartitionerPartition_Chaco() */
#endif
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
#include <chaco.h>
#else
/* Older versions of Chaco do not have an include file */
PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
                       float *ewgts, float *x, float *y, float *z, char *outassignname,
                       char *outfilename, short *assignment, int architecture, int ndims_tot,
                       int mesh_dims[3], double *goal, int global_method, int local_method,
                       int rqi_flag, int vmax, int ndims, double eigtol, long seed);
#endif
/* Global defined by the Chaco library: nonzero tells Chaco to free the caller's graph arrays */
extern int FREE_GRAPH;
#endif
1527 
/* Partition the serial dual graph using the Chaco library.

   Chaco is a sequential partitioner: in debug mode we verify that at most one
   process holds vertices. Chaco also prints diagnostics to stdout, so stdout
   is temporarily redirected into a pipe and the captured text is replayed as
   the error message if the library fails.

   Outputs: partSection holds the number of vertices per part, and partition
   lists the local vertex numbers grouped by part. */
static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int           *assignment;              /* Output partition */
#else
  short int     *assignment;              /* Output partition */
#endif
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
#if defined (PETSC_USE_DEBUG)
  {
    int ival,isum;
    PetscBool distributed;

    /* Chaco is serial: more than one rank with vertices means the graph is distributed */
    ival = (numVertices > 0);
    ierr = MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);CHKERRQ(ierr);
    distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
    if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
  }
#endif
  /* Empty local graph: emit an empty partition but still set up the section */
  if (!numVertices) {
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco expects 1-based vertex numbering; shift in place and undo below */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  /* NOTE(review): the casts below assume PetscInt has the same size as int in
     this build — TODO confirm the configure guard.
     NOTE(review): if Chaco writes more than the OS pipe buffer before
     returning, interface() could block since nothing drains the pipe until it
     returns — TODO confirm this cannot happen for realistic output sizes. */
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    /* Restore stdout and recover anything Chaco printed for error reporting */
    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#else
  if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* Group vertex numbers by their assigned part; this is O(nparts*nvtxs) */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift applied above so the caller's arrays are unchanged */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1648 
1649 static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1650 {
1651   PetscFunctionBegin;
1652   part->noGraph        = PETSC_FALSE;
1653   part->ops->view      = PetscPartitionerView_Chaco;
1654   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1655   part->ops->partition = PetscPartitionerPartition_Chaco;
1656   PetscFunctionReturn(0);
1657 }
1658 
1659 /*MC
1660   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1661 
1662   Level: intermediate
1663 
1664 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1665 M*/
1666 
1667 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1668 {
1669   PetscPartitioner_Chaco *p;
1670   PetscErrorCode          ierr;
1671 
1672   PetscFunctionBegin;
1673   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1674   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1675   part->data = p;
1676 
1677   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1678   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1679   PetscFunctionReturn(0);
1680 }
1681 
1682 static const char *ptypes[] = {"kway", "rb"};
1683 
1684 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1685 {
1686   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1687   PetscErrorCode             ierr;
1688 
1689   PetscFunctionBegin;
1690   ierr = PetscFree(p);CHKERRQ(ierr);
1691   PetscFunctionReturn(0);
1692 }
1693 
1694 static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1695 {
1696   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1697   PetscErrorCode             ierr;
1698 
1699   PetscFunctionBegin;
1700   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1701   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis type: %s\n", ptypes[p->ptype]);CHKERRQ(ierr);
1702   ierr = PetscViewerASCIIPrintf(viewer, "load imbalance ratio %g\n", (double) p->imbalanceRatio);CHKERRQ(ierr);
1703   ierr = PetscViewerASCIIPrintf(viewer, "debug flag %D\n", p->debugFlag);CHKERRQ(ierr);
1704   ierr = PetscViewerASCIIPrintf(viewer, "random seed %D\n", p->randomSeed);CHKERRQ(ierr);
1705   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1706   PetscFunctionReturn(0);
1707 }
1708 
1709 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1710 {
1711   PetscBool      iascii;
1712   PetscErrorCode ierr;
1713 
1714   PetscFunctionBegin;
1715   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1716   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1717   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1718   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1719   PetscFunctionReturn(0);
1720 }
1721 
1722 static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1723 {
1724   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1725   PetscErrorCode            ierr;
1726 
1727   PetscFunctionBegin;
1728   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");CHKERRQ(ierr);
1729   ierr = PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);CHKERRQ(ierr);
1730   ierr = PetscOptionsReal("-petscpartitioner_parmetis_imbalance_ratio", "Load imbalance ratio limit", "", p->imbalanceRatio, &p->imbalanceRatio, NULL);CHKERRQ(ierr);
1731   ierr = PetscOptionsInt("-petscpartitioner_parmetis_debug", "Debugging flag", "", p->debugFlag, &p->debugFlag, NULL);CHKERRQ(ierr);
1732   ierr = PetscOptionsInt("-petscpartitioner_parmetis_seed", "Random seed", "", p->randomSeed, &p->randomSeed, NULL);CHKERRQ(ierr);
1733   ierr = PetscOptionsTail();CHKERRQ(ierr);
1734   PetscFunctionReturn(0);
1735 }
1736 
1737 #if defined(PETSC_HAVE_PARMETIS)
1738 #include <parmetis.h>
1739 #endif
1740 
1741 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1742 {
1743 #if defined(PETSC_HAVE_PARMETIS)
1744   PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *) part->data;
1745   MPI_Comm       comm;
1746   PetscSection   section;
1747   PetscInt       nvtxs       = numVertices; /* The number of vertices in full graph */
1748   PetscInt      *vtxdist;                   /* Distribution of vertices across processes */
1749   PetscInt      *xadj        = start;       /* Start of edge list for each vertex */
1750   PetscInt      *adjncy      = adjacency;   /* Edge lists for all vertices */
1751   PetscInt      *vwgt        = NULL;        /* Vertex weights */
1752   PetscInt      *adjwgt      = NULL;        /* Edge weights */
1753   PetscInt       wgtflag     = 0;           /* Indicates which weights are present */
1754   PetscInt       numflag     = 0;           /* Indicates initial offset (0 or 1) */
1755   PetscInt       ncon        = 1;           /* The number of weights per vertex */
1756   PetscInt       metis_ptype = pm->ptype;   /* kway or recursive bisection */
1757   real_t        *tpwgts;                    /* The fraction of vertex weights assigned to each partition */
1758   real_t        *ubvec;                     /* The balance intolerance for vertex weights */
1759   PetscInt       options[64];               /* Options */
1760   /* Outputs */
1761   PetscInt       v, i, *assignment, *points;
1762   PetscMPIInt    size, rank, p;
1763   PetscErrorCode ierr;
1764 
1765   PetscFunctionBegin;
1766   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1767   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1768   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1769   /* Calculate vertex distribution */
1770   ierr = PetscMalloc5(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
1771   vtxdist[0] = 0;
1772   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1773   for (p = 2; p <= size; ++p) {
1774     vtxdist[p] += vtxdist[p-1];
1775   }
1776   /* Calculate partition weights */
1777   for (p = 0; p < nparts; ++p) {
1778     tpwgts[p] = 1.0/nparts;
1779   }
1780   ubvec[0] = pm->imbalanceRatio;
1781   /* Weight cells by dofs on cell by default */
1782   ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
1783   for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1784   if (section) {
1785     PetscInt cStart, cEnd, dof;
1786 
1787     /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1788     /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1789     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
1790     for (v = cStart; v < cStart + numVertices; ++v) {
1791       ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1792       vwgt[v-cStart] = PetscMax(dof, 1);
1793     }
1794   }
1795   wgtflag |= 2; /* have weights on graph vertices */
1796 
1797   if (nparts == 1) {
1798     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1799   } else {
1800     for (p = 0; !vtxdist[p+1] && p < size; ++p);
1801     if (vtxdist[p+1] == vtxdist[size]) {
1802       if (rank == p) {
1803         ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
1804         options[METIS_OPTION_DBGLVL] = pm->debugFlag;
1805         options[METIS_OPTION_SEED]   = pm->randomSeed;
1806         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
1807         if (metis_ptype == 1) {
1808           PetscStackPush("METIS_PartGraphRecursive");
1809           ierr = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1810           PetscStackPop;
1811           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
1812         } else {
1813           /*
1814            It would be nice to activate the two options below, but they would need some actual testing.
1815            - Turning on these options may exercise path of the METIS code that have bugs and may break production runs.
1816            - If CONTIG is set to 1, METIS will exit with error if the graph is disconnected, despite the manual saying the option is ignored in such case.
1817           */
1818           /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
1819           /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
1820           PetscStackPush("METIS_PartGraphKway");
1821           ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1822           PetscStackPop;
1823           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1824         }
1825       }
1826     } else {
1827       options[0] = 1; /*use options */
1828       options[1] = pm->debugFlag;
1829       options[2] = (pm->randomSeed == -1) ? 15 : pm->randomSeed; /* default is GLOBAL_SEED=15 from `libparmetis/defs.h` */
1830       PetscStackPush("ParMETIS_V3_PartKway");
1831       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &comm);
1832       PetscStackPop;
1833       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
1834     }
1835   }
1836   /* Convert to PetscSection+IS */
1837   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1838   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1839   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1840   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1841   for (p = 0, i = 0; p < nparts; ++p) {
1842     for (v = 0; v < nvtxs; ++v) {
1843       if (assignment[v] == p) points[i++] = v;
1844     }
1845   }
1846   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1847   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1848   ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
1849   PetscFunctionReturn(0);
1850 #else
1851   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1852 #endif
1853 }
1854 
1855 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1856 {
1857   PetscFunctionBegin;
1858   part->noGraph             = PETSC_FALSE;
1859   part->ops->view           = PetscPartitionerView_ParMetis;
1860   part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
1861   part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
1862   part->ops->partition      = PetscPartitionerPartition_ParMetis;
1863   PetscFunctionReturn(0);
1864 }
1865 
1866 /*MC
1867   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1868 
1869   Level: intermediate
1870 
1871 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1872 M*/
1873 
1874 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1875 {
1876   PetscPartitioner_ParMetis *p;
1877   PetscErrorCode          ierr;
1878 
1879   PetscFunctionBegin;
1880   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1881   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1882   part->data = p;
1883 
1884   p->ptype          = 0;
1885   p->imbalanceRatio = 1.05;
1886   p->debugFlag      = 0;
1887   p->randomSeed     = -1; /* defaults to GLOBAL_SEED=15 from `libparmetis/defs.h` */
1888 
1889   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1890   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1891   PetscFunctionReturn(0);
1892 }
1893 
/* Citation metadata registered via PetscCitationsRegister() when the PT-Scotch
   partitioner is first created; reported by the -citations option */
PetscBool PTScotchPartitionercite = PETSC_FALSE;
const char PTScotchPartitionerCitation[] =
  "@article{PTSCOTCH,\n"
  "  author  = {C. Chevalier and F. Pellegrini},\n"
  "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
  "  journal = {Parallel Computing},\n"
  "  volume  = {34},\n"
  "  number  = {6},\n"
  "  pages   = {318--331},\n"
  "  year    = {2008},\n"
  "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
  "}\n";
1906 
#if defined(PETSC_HAVE_PTSCOTCH)

EXTERN_C_BEGIN
#include <ptscotch.h>
EXTERN_C_END

/* PT-Scotch routines return a plain int, nonzero on failure; translate that
   into a PETSc library error rather than passing it to CHKERRQ */
#define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0)
1914 
1915 static int PTScotch_Strategy(PetscInt strategy)
1916 {
1917   switch (strategy) {
1918   case  0: return SCOTCH_STRATDEFAULT;
1919   case  1: return SCOTCH_STRATQUALITY;
1920   case  2: return SCOTCH_STRATSPEED;
1921   case  3: return SCOTCH_STRATBALANCE;
1922   case  4: return SCOTCH_STRATSAFETY;
1923   case  5: return SCOTCH_STRATSCALABILITY;
1924   case  6: return SCOTCH_STRATRECURSIVE;
1925   case  7: return SCOTCH_STRATREMAP;
1926   default: return SCOTCH_STRATDEFAULT;
1927   }
1928 }
1929 
1930 
1931 static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1932                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[])
1933 {
1934   SCOTCH_Graph   grafdat;
1935   SCOTCH_Strat   stradat;
1936   SCOTCH_Num     vertnbr = n;
1937   SCOTCH_Num     edgenbr = xadj[n];
1938   SCOTCH_Num*    velotab = vtxwgt;
1939   SCOTCH_Num*    edlotab = adjwgt;
1940   SCOTCH_Num     flagval = strategy;
1941   double         kbalval = imbalance;
1942   PetscErrorCode ierr;
1943 
1944   PetscFunctionBegin;
1945   {
1946     PetscBool flg = PETSC_TRUE;
1947     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
1948     if (!flg) velotab = NULL;
1949   }
1950   ierr = SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr);
1951   ierr = SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
1952   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1953   ierr = SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
1954 #if defined(PETSC_USE_DEBUG)
1955   ierr = SCOTCH_graphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1956 #endif
1957   ierr = SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
1958   SCOTCH_stratExit(&stradat);
1959   SCOTCH_graphExit(&grafdat);
1960   PetscFunctionReturn(0);
1961 }
1962 
1963 static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1964                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[], MPI_Comm comm)
1965 {
1966   PetscMPIInt     procglbnbr;
1967   PetscMPIInt     proclocnum;
1968   SCOTCH_Arch     archdat;
1969   SCOTCH_Dgraph   grafdat;
1970   SCOTCH_Dmapping mappdat;
1971   SCOTCH_Strat    stradat;
1972   SCOTCH_Num      vertlocnbr;
1973   SCOTCH_Num      edgelocnbr;
1974   SCOTCH_Num*     veloloctab = vtxwgt;
1975   SCOTCH_Num*     edloloctab = adjwgt;
1976   SCOTCH_Num      flagval = strategy;
1977   double          kbalval = imbalance;
1978   PetscErrorCode  ierr;
1979 
1980   PetscFunctionBegin;
1981   {
1982     PetscBool flg = PETSC_TRUE;
1983     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
1984     if (!flg) veloloctab = NULL;
1985   }
1986   ierr = MPI_Comm_size(comm, &procglbnbr);CHKERRQ(ierr);
1987   ierr = MPI_Comm_rank(comm, &proclocnum);CHKERRQ(ierr);
1988   vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
1989   edgelocnbr = xadj[vertlocnbr];
1990 
1991   ierr = SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr);
1992   ierr = SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr);
1993 #if defined(PETSC_USE_DEBUG)
1994   ierr = SCOTCH_dgraphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1995 #endif
1996   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1997   ierr = SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRQ(ierr);
1998   ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
1999   ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
2000   ierr = SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr);
2001 
2002   ierr = SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr);
2003   SCOTCH_dgraphMapExit(&grafdat, &mappdat);
2004   SCOTCH_archExit(&archdat);
2005   SCOTCH_stratExit(&stradat);
2006   SCOTCH_dgraphExit(&grafdat);
2007   PetscFunctionReturn(0);
2008 }
2009 
2010 #endif /* PETSC_HAVE_PTSCOTCH */
2011 
2012 static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
2013 {
2014   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
2015   PetscErrorCode             ierr;
2016 
2017   PetscFunctionBegin;
2018   ierr = PetscFree(p);CHKERRQ(ierr);
2019   PetscFunctionReturn(0);
2020 }
2021 
2022 static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer)
2023 {
2024   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
2025   PetscErrorCode            ierr;
2026 
2027   PetscFunctionBegin;
2028   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
2029   ierr = PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);CHKERRQ(ierr);
2030   ierr = PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n",(double)p->imbalance);CHKERRQ(ierr);
2031   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
2032   PetscFunctionReturn(0);
2033 }
2034 
2035 static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
2036 {
2037   PetscBool      iascii;
2038   PetscErrorCode ierr;
2039 
2040   PetscFunctionBegin;
2041   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
2042   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
2043   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
2044   if (iascii) {ierr = PetscPartitionerView_PTScotch_Ascii(part, viewer);CHKERRQ(ierr);}
2045   PetscFunctionReturn(0);
2046 }
2047 
2048 static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
2049 {
2050   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
2051   const char *const         *slist = PTScotchStrategyList;
2052   PetscInt                  nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0]));
2053   PetscBool                 flag;
2054   PetscErrorCode            ierr;
2055 
2056   PetscFunctionBegin;
2057   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");CHKERRQ(ierr);
2058   ierr = PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);CHKERRQ(ierr);
2059   ierr = PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);CHKERRQ(ierr);
2060   ierr = PetscOptionsTail();CHKERRQ(ierr);
2061   PetscFunctionReturn(0);
2062 }
2063 
2064 static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
2065 {
2066 #if defined(PETSC_HAVE_PTSCOTCH)
2067   MPI_Comm       comm       = PetscObjectComm((PetscObject)part);
2068   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
2069   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
2070   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
2071   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
2072   PetscInt      *vwgt       = NULL;        /* Vertex weights */
2073   PetscInt      *adjwgt     = NULL;        /* Edge weights */
2074   PetscInt       v, i, *assignment, *points;
2075   PetscMPIInt    size, rank, p;
2076   PetscErrorCode ierr;
2077 
2078   PetscFunctionBegin;
2079   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2080   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2081   ierr = PetscMalloc2(size+1,&vtxdist,PetscMax(nvtxs,1),&assignment);CHKERRQ(ierr);
2082 
2083   /* Calculate vertex distribution */
2084   vtxdist[0] = 0;
2085   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
2086   for (p = 2; p <= size; ++p) {
2087     vtxdist[p] += vtxdist[p-1];
2088   }
2089 
2090   if (nparts == 1) {
2091     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
2092   } else { /* Weight cells by dofs on cell by default */
2093     PetscSection section;
2094 
2095     /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
2096     /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
2097     ierr = PetscMalloc1(PetscMax(nvtxs,1),&vwgt);CHKERRQ(ierr);
2098     for (v = 0; v < PetscMax(nvtxs,1); ++v) vwgt[v] = 1;
2099     ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
2100     if (section) {
2101       PetscInt vStart, vEnd, dof;
2102       ierr = DMPlexGetHeightStratum(dm, part->height, &vStart, &vEnd);CHKERRQ(ierr);
2103       for (v = vStart; v < vStart + numVertices; ++v) {
2104         ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
2105         vwgt[v-vStart] = PetscMax(dof, 1);
2106       }
2107     }
2108     {
2109       PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
2110       int                       strat = PTScotch_Strategy(pts->strategy);
2111       double                    imbal = (double)pts->imbalance;
2112 
2113       for (p = 0; !vtxdist[p+1] && p < size; ++p);
2114       if (vtxdist[p+1] == vtxdist[size]) {
2115         if (rank == p) {
2116           ierr = PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, assignment);CHKERRQ(ierr);
2117         }
2118       } else {
2119         ierr = PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, assignment, comm);CHKERRQ(ierr);
2120       }
2121     }
2122     ierr = PetscFree(vwgt);CHKERRQ(ierr);
2123   }
2124 
2125   /* Convert to PetscSection+IS */
2126   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
2127   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
2128   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
2129   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
2130   for (p = 0, i = 0; p < nparts; ++p) {
2131     for (v = 0; v < nvtxs; ++v) {
2132       if (assignment[v] == p) points[i++] = v;
2133     }
2134   }
2135   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
2136   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
2137 
2138   ierr = PetscFree2(vtxdist,assignment);CHKERRQ(ierr);
2139   PetscFunctionReturn(0);
2140 #else
2141   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
2142 #endif
2143 }
2144 
2145 static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
2146 {
2147   PetscFunctionBegin;
2148   part->noGraph             = PETSC_FALSE;
2149   part->ops->view           = PetscPartitionerView_PTScotch;
2150   part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
2151   part->ops->partition      = PetscPartitionerPartition_PTScotch;
2152   part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
2153   PetscFunctionReturn(0);
2154 }
2155 
2156 /*MC
2157   PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library
2158 
2159   Level: intermediate
2160 
2161 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
2162 M*/
2163 
2164 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
2165 {
2166   PetscPartitioner_PTScotch *p;
2167   PetscErrorCode          ierr;
2168 
2169   PetscFunctionBegin;
2170   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
2171   ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
2172   part->data = p;
2173 
2174   p->strategy  = 0;
2175   p->imbalance = 0.01;
2176 
2177   ierr = PetscPartitionerInitialize_PTScotch(part);CHKERRQ(ierr);
2178   ierr = PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);CHKERRQ(ierr);
2179   PetscFunctionReturn(0);
2180 }
2181 
2182 
2183 /*@
2184   DMPlexGetPartitioner - Get the mesh partitioner
2185 
2186   Not collective
2187 
2188   Input Parameter:
2189 . dm - The DM
2190 
2191   Output Parameter:
2192 . part - The PetscPartitioner
2193 
2194   Level: developer
2195 
2196   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
2197 
2198 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
2199 @*/
2200 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
2201 {
2202   DM_Plex       *mesh = (DM_Plex *) dm->data;
2203 
2204   PetscFunctionBegin;
2205   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2206   PetscValidPointer(part, 2);
2207   *part = mesh->partitioner;
2208   PetscFunctionReturn(0);
2209 }
2210 
2211 /*@
2212   DMPlexSetPartitioner - Set the mesh partitioner
2213 
2214   logically collective on dm and part
2215 
2216   Input Parameters:
2217 + dm - The DM
2218 - part - The partitioner
2219 
2220   Level: developer
2221 
2222   Note: Any existing PetscPartitioner will be destroyed.
2223 
2224 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
2225 @*/
2226 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
2227 {
2228   DM_Plex       *mesh = (DM_Plex *) dm->data;
2229   PetscErrorCode ierr;
2230 
2231   PetscFunctionBegin;
2232   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2233   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
2234   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
2235   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
2236   mesh->partitioner = part;
2237   PetscFunctionReturn(0);
2238 }
2239 
2240 static PetscErrorCode DMPlexAddClosure_Private(DM dm, PetscHSetI ht, PetscInt point)
2241 {
2242   const PetscInt *cone;
2243   PetscInt       coneSize, c;
2244   PetscBool      missing;
2245   PetscErrorCode ierr;
2246 
2247   PetscFunctionBeginHot;
2248   ierr = PetscHSetIQueryAdd(ht, point, &missing);CHKERRQ(ierr);
2249   if (missing) {
2250     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
2251     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
2252     for (c = 0; c < coneSize; c++) {
2253       ierr = DMPlexAddClosure_Private(dm, ht, cone[c]);CHKERRQ(ierr);
2254     }
2255   }
2256   PetscFunctionReturn(0);
2257 }
2258 
/* Add to 'ht' the closure of 'point' through the mesh tree relations:
   when 'up' is set, recurse through tree parents and add their full closures;
   when 'down' is set, recurse through tree children.
   Marked PETSC_UNUSED: referenced only by the disabled (#else) branch of
   DMPlexClosurePoints_Private(). */
PETSC_UNUSED static PetscErrorCode DMPlexAddClosure_Tree(DM dm, PetscHSetI ht, PetscInt point, PetscBool up, PetscBool down)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (up) {
    PetscInt parent;

    ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
    /* A point with no tree parent is reported as its own parent */
    if (parent != point) {
      PetscInt closureSize, *closure = NULL, i;

      /* Add the parent's full closure, then keep walking upwards from each member */
      ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
      for (i = 0; i < closureSize; i++) {
        PetscInt cpoint = closure[2*i];

        ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
        ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
      }
      ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
    }
  }
  if (down) {
    PetscInt numChildren;
    const PetscInt *children;

    ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
    if (numChildren) {
      PetscInt i;

      /* Add each child and continue downwards only */
      for (i = 0; i < numChildren; i++) {
        PetscInt cpoint = children[i];

        ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
        ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}
2299 
2300 static PetscErrorCode DMPlexAddClosureTree_Up_Private(DM dm, PetscHSetI ht, PetscInt point)
2301 {
2302   PetscInt       parent;
2303   PetscErrorCode ierr;
2304 
2305   PetscFunctionBeginHot;
2306   ierr = DMPlexGetTreeParent(dm, point, &parent,NULL);CHKERRQ(ierr);
2307   if (point != parent) {
2308     const PetscInt *cone;
2309     PetscInt       coneSize, c;
2310 
2311     ierr = DMPlexAddClosureTree_Up_Private(dm, ht, parent);CHKERRQ(ierr);
2312     ierr = DMPlexAddClosure_Private(dm, ht, parent);CHKERRQ(ierr);
2313     ierr = DMPlexGetCone(dm, parent, &cone);CHKERRQ(ierr);
2314     ierr = DMPlexGetConeSize(dm, parent, &coneSize);CHKERRQ(ierr);
2315     for (c = 0; c < coneSize; c++) {
2316       const PetscInt cp = cone[c];
2317 
2318       ierr = DMPlexAddClosureTree_Up_Private(dm, ht, cp);CHKERRQ(ierr);
2319     }
2320   }
2321   PetscFunctionReturn(0);
2322 }
2323 
2324 static PetscErrorCode DMPlexAddClosureTree_Down_Private(DM dm, PetscHSetI ht, PetscInt point)
2325 {
2326   PetscInt       i, numChildren;
2327   const PetscInt *children;
2328   PetscErrorCode ierr;
2329 
2330   PetscFunctionBeginHot;
2331   ierr = DMPlexGetTreeChildren(dm, point, &numChildren, &children);CHKERRQ(ierr);
2332   for (i = 0; i < numChildren; i++) {
2333     ierr = PetscHSetIAdd(ht, children[i]);CHKERRQ(ierr);
2334   }
2335   PetscFunctionReturn(0);
2336 }
2337 
2338 static PetscErrorCode DMPlexAddClosureTree_Private(DM dm, PetscHSetI ht, PetscInt point)
2339 {
2340   const PetscInt *cone;
2341   PetscInt       coneSize, c;
2342   PetscErrorCode ierr;
2343 
2344   PetscFunctionBeginHot;
2345   ierr = PetscHSetIAdd(ht, point);CHKERRQ(ierr);
2346   ierr = DMPlexAddClosureTree_Up_Private(dm, ht, point);CHKERRQ(ierr);
2347   ierr = DMPlexAddClosureTree_Down_Private(dm, ht, point);CHKERRQ(ierr);
2348   ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
2349   ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
2350   for (c = 0; c < coneSize; c++) {
2351     ierr = DMPlexAddClosureTree_Private(dm, ht, cone[c]);CHKERRQ(ierr);
2352   }
2353   PetscFunctionReturn(0);
2354 }
2355 
/* Compute the union of the transitive closures of the given points, returned as a
   sorted IS on PETSC_COMM_SELF. When the mesh stores tree relations (parentSection
   or childSection is set), tree parents and children are included as well. */
PetscErrorCode DMPlexClosurePoints_Private(DM dm, PetscInt numPoints, const PetscInt points[], IS *closureIS)
{
  DM_Plex         *mesh = (DM_Plex *)dm->data;
  const PetscBool hasTree = (mesh->parentSection || mesh->childSection) ? PETSC_TRUE : PETSC_FALSE;
  PetscInt        nelems, *elems, off = 0, p;
  PetscHSetI      ht;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
  /* Preallocate assuming roughly 16 closure points per input point */
  ierr = PetscHSetIResize(ht, numPoints*16);CHKERRQ(ierr);
  if (!hasTree) {
    for (p = 0; p < numPoints; ++p) {
      ierr = DMPlexAddClosure_Private(dm, ht, points[p]);CHKERRQ(ierr);
    }
  } else {
#if 1
    for (p = 0; p < numPoints; ++p) {
      ierr = DMPlexAddClosureTree_Private(dm, ht, points[p]);CHKERRQ(ierr);
    }
#else
    /* Disabled alternative based on DMPlexGetTransitiveClosure() plus tree walks */
    PetscInt  *closure = NULL, closureSize, c;
    for (p = 0; p < numPoints; ++p) {
      ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      for (c = 0; c < closureSize*2; c += 2) {
        ierr = PetscHSetIAdd(ht, closure[c]);CHKERRQ(ierr);
        if (hasTree) {ierr = DMPlexAddClosure_Tree(dm, ht, closure[c], PETSC_TRUE, PETSC_TRUE);CHKERRQ(ierr);}
      }
    }
    if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure);CHKERRQ(ierr);}
#endif
  }
  /* Flatten the hash set into a sorted array; ownership passes to the IS */
  ierr = PetscHSetIGetSize(ht, &nelems);CHKERRQ(ierr);
  ierr = PetscMalloc1(nelems, &elems);CHKERRQ(ierr);
  ierr = PetscHSetIGetElems(ht, &off, elems);CHKERRQ(ierr);
  ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
  ierr = PetscSortInt(nelems, elems);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF, nelems, elems, PETSC_OWN_POINTER, closureIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2396 
2397 /*@
2398   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
2399 
2400   Input Parameters:
2401 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2403 
2404   Level: developer
2405 
2406 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2407 @*/
2408 PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
2409 {
2410   IS              rankIS,   pointIS, closureIS;
2411   const PetscInt *ranks,   *points;
2412   PetscInt        numRanks, numPoints, r;
2413   PetscErrorCode  ierr;
2414 
2415   PetscFunctionBegin;
2416   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2417   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2418   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2419   for (r = 0; r < numRanks; ++r) {
2420     const PetscInt rank = ranks[r];
2421     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2422     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2423     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2424     ierr = DMPlexClosurePoints_Private(dm, numPoints, points, &closureIS);CHKERRQ(ierr);
2425     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2426     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2427     ierr = DMLabelSetStratumIS(label, rank, closureIS);CHKERRQ(ierr);
2428     ierr = ISDestroy(&closureIS);CHKERRQ(ierr);
2429   }
2430   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2431   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2432   PetscFunctionReturn(0);
2433 }
2434 
2435 /*@
2436   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
2437 
2438   Input Parameters:
2439 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2441 
2442   Level: developer
2443 
2444 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2445 @*/
2446 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
2447 {
2448   IS              rankIS,   pointIS;
2449   const PetscInt *ranks,   *points;
2450   PetscInt        numRanks, numPoints, r, p, a, adjSize;
2451   PetscInt       *adj = NULL;
2452   PetscErrorCode  ierr;
2453 
2454   PetscFunctionBegin;
2455   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2456   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2457   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2458   for (r = 0; r < numRanks; ++r) {
2459     const PetscInt rank = ranks[r];
2460 
2461     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2462     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2463     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2464     for (p = 0; p < numPoints; ++p) {
2465       adjSize = PETSC_DETERMINE;
2466       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
2467       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
2468     }
2469     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2470     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2471   }
2472   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2473   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2474   ierr = PetscFree(adj);CHKERRQ(ierr);
2475   PetscFunctionReturn(0);
2476 }
2477 
2478 /*@
2479   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
2480 
2481   Input Parameters:
2482 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2484 
2485   Level: developer
2486 
2487   Note: This is required when generating multi-level overlaps to capture
2488   overlap points from non-neighbouring partitions.
2489 
2490 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2491 @*/
2492 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
2493 {
2494   MPI_Comm        comm;
2495   PetscMPIInt     rank;
2496   PetscSF         sfPoint;
2497   DMLabel         lblRoots, lblLeaves;
2498   IS              rankIS, pointIS;
2499   const PetscInt *ranks;
2500   PetscInt        numRanks, r;
2501   PetscErrorCode  ierr;
2502 
2503   PetscFunctionBegin;
2504   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2505   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2506   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
2507   /* Pull point contributions from remote leaves into local roots */
2508   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
2509   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
2510   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2511   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2512   for (r = 0; r < numRanks; ++r) {
2513     const PetscInt remoteRank = ranks[r];
2514     if (remoteRank == rank) continue;
2515     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
2516     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2517     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2518   }
2519   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2520   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2521   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
2522   /* Push point contributions from roots into remote leaves */
2523   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
2524   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
2525   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2526   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2527   for (r = 0; r < numRanks; ++r) {
2528     const PetscInt remoteRank = ranks[r];
2529     if (remoteRank == rank) continue;
2530     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
2531     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2532     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2533   }
2534   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2535   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2536   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
2537   PetscFunctionReturn(0);
2538 }
2539 
2540 /*@
2541   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
2542 
2543   Input Parameters:
+ dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots
2550 
2551   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
2552   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
2553 
2554   Level: developer
2555 
2556 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2557 @*/
2558 PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
2559 {
2560   MPI_Comm           comm;
2561   PetscMPIInt        rank, size, r;
2562   PetscInt           p, n, numNeighbors, numPoints, dof, off, rootSize, l, nleaves, leafSize;
2563   PetscSF            sfPoint;
2564   PetscSection       rootSection;
2565   PetscSFNode       *rootPoints, *leafPoints;
2566   const PetscSFNode *remote;
2567   const PetscInt    *local, *neighbors;
2568   IS                 valueIS;
2569   PetscBool          mpiOverflow = PETSC_FALSE;
2570   PetscErrorCode     ierr;
2571 
2572   PetscFunctionBegin;
2573   ierr = PetscLogEventBegin(DMPLEX_PartLabelInvert,dm,0,0,0);CHKERRQ(ierr);
2574   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2575   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2576   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2577   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
2578 
2579   /* Convert to (point, rank) and use actual owners */
2580   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
2581   ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
2582   ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
2583   ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
2584   ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
2585   for (n = 0; n < numNeighbors; ++n) {
2586     ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
2587     ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
2588   }
2589   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
2590   ierr = PetscSectionGetStorageSize(rootSection, &rootSize);CHKERRQ(ierr);
2591   ierr = PetscMalloc1(rootSize, &rootPoints);CHKERRQ(ierr);
2592   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
2593   for (n = 0; n < numNeighbors; ++n) {
2594     IS              pointIS;
2595     const PetscInt *points;
2596 
2597     ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
2598     ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
2599     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2600     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2601     for (p = 0; p < numPoints; ++p) {
2602       if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
2603       else       {l = -1;}
2604       if (l >= 0) {rootPoints[off+p] = remote[l];}
2605       else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
2606     }
2607     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2608     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2609   }
2610 
2611   /* Try to communicate overlap using All-to-All */
2612   if (!processSF) {
2613     PetscInt64  counter = 0;
2614     PetscBool   locOverflow = PETSC_FALSE;
2615     PetscMPIInt *scounts, *sdispls, *rcounts, *rdispls;
2616 
2617     ierr = PetscCalloc4(size, &scounts, size, &sdispls, size, &rcounts, size, &rdispls);CHKERRQ(ierr);
2618     for (n = 0; n < numNeighbors; ++n) {
2619       ierr = PetscSectionGetDof(rootSection, neighbors[n], &dof);CHKERRQ(ierr);
2620       ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
2621 #if defined(PETSC_USE_64BIT_INDICES)
2622       if (dof > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
2623       if (off > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
2624 #endif
2625       scounts[neighbors[n]] = (PetscMPIInt) dof;
2626       sdispls[neighbors[n]] = (PetscMPIInt) off;
2627     }
2628     ierr = MPI_Alltoall(scounts, 1, MPI_INT, rcounts, 1, MPI_INT, comm);CHKERRQ(ierr);
2629     for (r = 0; r < size; ++r) { rdispls[r] = (int)counter; counter += rcounts[r]; }
2630     if (counter > PETSC_MPI_INT_MAX) locOverflow = PETSC_TRUE;
2631     ierr = MPI_Allreduce(&locOverflow, &mpiOverflow, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
2632     if (!mpiOverflow) {
2633       ierr = PetscInfo(dm,"Using Alltoallv for mesh distribution\n");CHKERRQ(ierr);
2634       leafSize = (PetscInt) counter;
2635       ierr = PetscMalloc1(leafSize, &leafPoints);CHKERRQ(ierr);
2636       ierr = MPI_Alltoallv(rootPoints, scounts, sdispls, MPIU_2INT, leafPoints, rcounts, rdispls, MPIU_2INT, comm);CHKERRQ(ierr);
2637     }
2638     ierr = PetscFree4(scounts, sdispls, rcounts, rdispls);CHKERRQ(ierr);
2639   }
2640 
2641   /* Communicate overlap using process star forest */
2642   if (processSF || mpiOverflow) {
2643     PetscSF      procSF;
2644     PetscSFNode  *remote;
2645     PetscSection leafSection;
2646 
2647     if (processSF) {
2648       ierr = PetscInfo(dm,"Using processSF for mesh distribution\n");CHKERRQ(ierr);
2649       ierr = PetscObjectReference((PetscObject)processSF);CHKERRQ(ierr);
2650       procSF = processSF;
2651     } else {
2652       ierr = PetscInfo(dm,"Using processSF for mesh distribution (MPI overflow)\n");CHKERRQ(ierr);
2653       ierr = PetscMalloc1(size, &remote);CHKERRQ(ierr);
2654       for (r = 0; r < size; ++r) { remote[r].rank  = r; remote[r].index = rank; }
2655       ierr = PetscSFCreate(comm, &procSF);CHKERRQ(ierr);
2656       ierr = PetscSFSetGraph(procSF, size, size, NULL, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
2657     }
2658 
2659     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dm), &leafSection);CHKERRQ(ierr);
2660     ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
2661     ierr = PetscSectionGetStorageSize(leafSection, &leafSize);CHKERRQ(ierr);
2662     ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
2663     ierr = PetscSFDestroy(&procSF);CHKERRQ(ierr);
2664   }
2665 
2666   for (p = 0; p < leafSize; p++) {
2667     ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
2668   }
2669 
2670   ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
2671   ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
2672   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
2673   ierr = PetscFree(rootPoints);CHKERRQ(ierr);
2674   ierr = PetscFree(leafPoints);CHKERRQ(ierr);
2675   ierr = PetscLogEventEnd(DMPLEX_PartLabelInvert,dm,0,0,0);CHKERRQ(ierr);
2676   PetscFunctionReturn(0);
2677 }
2678 
2679 /*@
2680   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
2681 
2682   Input Parameters:
+ dm    - The DM
- label - DMLabel assigning ranks to remote roots

  Output Parameter:
. sf    - The star forest communication context encapsulating the defined mapping
2688 
2689   Note: The incoming label is a receiver mapping of remote points to their parent rank.
2690 
2691   Level: developer
2692 
2693 .seealso: DMPlexDistribute(), DMPlexCreateOverlap()
2694 @*/
PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
{
  PetscMPIInt     rank;
  PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0, nNeighbors;
  PetscSFNode    *remotePoints;
  IS              remoteRootIS, neighborsIS;
  const PetscInt *remoteRoots, *neighbors;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_PartLabelCreateSF,dm,0,0,0);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);

  /* The label values are the ranks assigned to the labeled points */
  ierr = DMLabelGetValueIS(label, &neighborsIS);CHKERRQ(ierr);
#if 0
  /* Disabled: sort the neighbor ranks before traversal */
  {
    IS is;
    ierr = ISDuplicate(neighborsIS, &is);CHKERRQ(ierr);
    ierr = ISSort(is);CHKERRQ(ierr);
    ierr = ISDestroy(&neighborsIS);CHKERRQ(ierr);
    neighborsIS = is;
  }
#endif
  /* Total number of SF leaves: every labeled point, over all rank values */
  ierr = ISGetLocalSize(neighborsIS, &nNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(neighborsIS, &neighbors);CHKERRQ(ierr);
  for (numRemote = 0, n = 0; n < nNeighbors; ++n) {
    ierr = DMLabelGetStratumSize(label, neighbors[n], &numPoints);CHKERRQ(ierr);
    numRemote += numPoints;
  }
  ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
  /* Put owned points first */
  ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
  if (numPoints > 0) {
    ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
    ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    for (p = 0; p < numPoints; p++) {
      remotePoints[idx].index = remoteRoots[p];
      remotePoints[idx].rank = rank;
      idx++;
    }
    ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
  }
  /* Now add remote points */
  for (n = 0; n < nNeighbors; ++n) {
    const PetscInt nn = neighbors[n];

    ierr = DMLabelGetStratumSize(label, nn, &numPoints);CHKERRQ(ierr);
    /* skip our own stratum, which was added above */
    if (nn == rank || numPoints <= 0) continue;
    ierr = DMLabelGetStratumIS(label, nn, &remoteRootIS);CHKERRQ(ierr);
    ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    for (p = 0; p < numPoints; p++) {
      remotePoints[idx].index = remoteRoots[p];
      remotePoints[idx].rank = nn;
      idx++;
    }
    ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
  }
  /* NULL ilocal means the leaves are the contiguous range [0, numRemote) */
  ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFSetUp(*sf);CHKERRQ(ierr);
  ierr = ISDestroy(&neighborsIS);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_PartLabelCreateSF,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2762 
2763 /* The two functions below are used by DMPlexRebalanceSharedPoints which errors
2764  * when PETSc is built without ParMETIS. To avoid -Wunused-function, we take
2765  * them out in that case. */
2766 #if defined(PETSC_HAVE_PARMETIS)
2767 /*@C
2768 
2769   DMPlexRewriteSF - Rewrites the ownership of the SF of a DM (in place).
2770 
  Input Parameters:
+ dm                - The DMPlex object.
. n                 - The number of points.
. pointsToRewrite   - The points in the SF whose ownership will change.
. targetOwners      - New owner for each element in pointsToRewrite.
- degrees           - Degrees of the points in the SF as obtained by PetscSFComputeDegreeBegin/PetscSFComputeDegreeEnd.
2777 
2778   Level: developer
2779 
2780 @*/
2781 static PetscErrorCode DMPlexRewriteSF(DM dm, PetscInt n, PetscInt *pointsToRewrite, PetscInt *targetOwners, const PetscInt *degrees)
2782 {
2783   PetscInt      ierr, pStart, pEnd, i, j, counter, leafCounter, sumDegrees, nroots, nleafs;
2784   PetscInt     *cumSumDegrees, *newOwners, *newNumbers, *rankOnLeafs, *locationsOfLeafs, *remoteLocalPointOfLeafs, *points, *leafsNew;
2785   PetscSFNode  *leafLocationsNew;
2786   const         PetscSFNode *iremote;
2787   const         PetscInt *ilocal;
2788   PetscBool    *isLeaf;
2789   PetscSF       sf;
2790   MPI_Comm      comm;
2791   PetscMPIInt   rank, size;
2792 
2793   PetscFunctionBegin;
2794   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2795   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2796   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2797   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
2798 
2799   ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
2800   ierr = PetscSFGetGraph(sf, &nroots, &nleafs, &ilocal, &iremote); CHKERRQ(ierr);
2801   ierr = PetscMalloc1(pEnd-pStart, &isLeaf);CHKERRQ(ierr);
2802   for (i=0; i<pEnd-pStart; i++) {
2803     isLeaf[i] = PETSC_FALSE;
2804   }
2805   for (i=0; i<nleafs; i++) {
2806     isLeaf[ilocal[i]-pStart] = PETSC_TRUE;
2807   }
2808 
2809   ierr = PetscMalloc1(pEnd-pStart+1, &cumSumDegrees);CHKERRQ(ierr);
2810   cumSumDegrees[0] = 0;
2811   for (i=1; i<=pEnd-pStart; i++) {
2812     cumSumDegrees[i] = cumSumDegrees[i-1] + degrees[i-1];
2813   }
2814   sumDegrees = cumSumDegrees[pEnd-pStart];
2815   /* get the location of my leafs (we have sumDegrees many leafs pointing at our roots) */
2816 
2817   ierr = PetscMalloc1(sumDegrees, &locationsOfLeafs);CHKERRQ(ierr);
2818   ierr = PetscMalloc1(pEnd-pStart, &rankOnLeafs);CHKERRQ(ierr);
2819   for (i=0; i<pEnd-pStart; i++) {
2820     rankOnLeafs[i] = rank;
2821   }
2822   ierr = PetscSFGatherBegin(sf, MPIU_INT, rankOnLeafs, locationsOfLeafs);CHKERRQ(ierr);
2823   ierr = PetscSFGatherEnd(sf, MPIU_INT, rankOnLeafs, locationsOfLeafs);CHKERRQ(ierr);
2824   ierr = PetscFree(rankOnLeafs);CHKERRQ(ierr);
2825 
2826   /* get the remote local points of my leaves */
2827   ierr = PetscMalloc1(sumDegrees, &remoteLocalPointOfLeafs);CHKERRQ(ierr);
2828   ierr = PetscMalloc1(pEnd-pStart, &points);CHKERRQ(ierr);
2829   for (i=0; i<pEnd-pStart; i++) {
2830     points[i] = pStart+i;
2831   }
2832   ierr = PetscSFGatherBegin(sf, MPIU_INT, points, remoteLocalPointOfLeafs);CHKERRQ(ierr);
2833   ierr = PetscSFGatherEnd(sf, MPIU_INT, points, remoteLocalPointOfLeafs);CHKERRQ(ierr);
2834   ierr = PetscFree(points);CHKERRQ(ierr);
2835   /* Figure out the new owners of the vertices that are up for grabs and their numbers on the new owners */
2836   ierr = PetscMalloc1(pEnd-pStart, &newOwners);CHKERRQ(ierr);
2837   ierr = PetscMalloc1(pEnd-pStart, &newNumbers);CHKERRQ(ierr);
2838   for (i=0; i<pEnd-pStart; i++) {
2839     newOwners[i] = -1;
2840     newNumbers[i] = -1;
2841   }
2842   {
2843     PetscInt oldNumber, newNumber, oldOwner, newOwner;
2844     for (i=0; i<n; i++) {
2845       oldNumber = pointsToRewrite[i];
2846       newNumber = -1;
2847       oldOwner = rank;
2848       newOwner = targetOwners[i];
2849       if (oldOwner == newOwner) {
2850         newNumber = oldNumber;
2851       } else {
2852         for (j=0; j<degrees[oldNumber]; j++) {
2853           if (locationsOfLeafs[cumSumDegrees[oldNumber]+j] == newOwner) {
2854             newNumber = remoteLocalPointOfLeafs[cumSumDegrees[oldNumber]+j];
2855             break;
2856           }
2857         }
2858       }
2859       if (newNumber == -1) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Couldn't find the new owner of vertex.");
2860 
2861       newOwners[oldNumber] = newOwner;
2862       newNumbers[oldNumber] = newNumber;
2863     }
2864   }
2865   ierr = PetscFree(cumSumDegrees);CHKERRQ(ierr);
2866   ierr = PetscFree(locationsOfLeafs);CHKERRQ(ierr);
2867   ierr = PetscFree(remoteLocalPointOfLeafs);CHKERRQ(ierr);
2868 
2869   ierr = PetscSFBcastBegin(sf, MPIU_INT, newOwners, newOwners);CHKERRQ(ierr);
2870   ierr = PetscSFBcastEnd(sf, MPIU_INT, newOwners, newOwners);CHKERRQ(ierr);
2871   ierr = PetscSFBcastBegin(sf, MPIU_INT, newNumbers, newNumbers);CHKERRQ(ierr);
2872   ierr = PetscSFBcastEnd(sf, MPIU_INT, newNumbers, newNumbers);CHKERRQ(ierr);
2873 
2874   /* Now count how many leafs we have on each processor. */
2875   leafCounter=0;
2876   for (i=0; i<pEnd-pStart; i++) {
2877     if (newOwners[i] >= 0) {
2878       if (newOwners[i] != rank) {
2879         leafCounter++;
2880       }
2881     } else {
2882       if (isLeaf[i]) {
2883         leafCounter++;
2884       }
2885     }
2886   }
2887 
2888   /* Now set up the new sf by creating the leaf arrays */
2889   ierr = PetscMalloc1(leafCounter, &leafsNew);CHKERRQ(ierr);
2890   ierr = PetscMalloc1(leafCounter, &leafLocationsNew);CHKERRQ(ierr);
2891 
2892   leafCounter = 0;
2893   counter = 0;
2894   for (i=0; i<pEnd-pStart; i++) {
2895     if (newOwners[i] >= 0) {
2896       if (newOwners[i] != rank) {
2897         leafsNew[leafCounter] = i;
2898         leafLocationsNew[leafCounter].rank = newOwners[i];
2899         leafLocationsNew[leafCounter].index = newNumbers[i];
2900         leafCounter++;
2901       }
2902     } else {
2903       if (isLeaf[i]) {
2904         leafsNew[leafCounter] = i;
2905         leafLocationsNew[leafCounter].rank = iremote[counter].rank;
2906         leafLocationsNew[leafCounter].index = iremote[counter].index;
2907         leafCounter++;
2908       }
2909     }
2910     if (isLeaf[i]) {
2911       counter++;
2912     }
2913   }
2914 
2915   ierr = PetscSFSetGraph(sf, nroots, leafCounter, leafsNew, PETSC_OWN_POINTER, leafLocationsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
2916   ierr = PetscFree(newOwners);CHKERRQ(ierr);
2917   ierr = PetscFree(newNumbers);CHKERRQ(ierr);
2918   ierr = PetscFree(isLeaf);CHKERRQ(ierr);
2919   PetscFunctionReturn(0);
2920 }
2921 
2922 static PetscErrorCode DMPlexViewDistribution(MPI_Comm comm, PetscInt n, PetscInt skip, PetscInt *vtxwgt, PetscInt *part, PetscViewer viewer)
2923 {
2924   PetscInt *distribution, min, max, sum, i, ierr;
2925   PetscMPIInt rank, size;
2926   PetscFunctionBegin;
2927   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2928   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2929   ierr = PetscCalloc1(size, &distribution);CHKERRQ(ierr);
2930   for (i=0; i<n; i++) {
2931     if (part) distribution[part[i]] += vtxwgt[skip*i];
2932     else distribution[rank] += vtxwgt[skip*i];
2933   }
2934   ierr = MPI_Allreduce(MPI_IN_PLACE, distribution, size, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
2935   min = distribution[0];
2936   max = distribution[0];
2937   sum = distribution[0];
2938   for (i=1; i<size; i++) {
2939     if (distribution[i]<min) min=distribution[i];
2940     if (distribution[i]>max) max=distribution[i];
2941     sum += distribution[i];
2942   }
2943   ierr = PetscViewerASCIIPrintf(viewer, "Min: %D, Avg: %D, Max: %D, Balance: %f\n", min, sum/size, max, (max*1.*size)/sum);CHKERRQ(ierr);
2944   ierr = PetscFree(distribution);CHKERRQ(ierr);
2945   PetscFunctionReturn(0);
2946 }
2947 
2948 #endif
2949 
/*@
  DMPlexRebalanceSharedPoints - Redistribute points in the plex that are shared in order to achieve better balancing. This routine updates the PointSF of the DM in place.

  Input Parameters:
+ dm              - The DMPlex object.
. entityDepth     - depth of the entity to balance (0 -> balance vertices).
. useInitialGuess - whether to use the current distribution as initial guess (only used by ParMETIS).
- parallel        - whether to use ParMETIS and do the partition in parallel or whether to gather the graph onto a single process and use METIS.

  Output Parameter:
. success         - whether the graph partitioning was successful or not. If not, try useInitialGuess=True and parallel=True.

  Level: user

@*/
2965 
PetscErrorCode DMPlexRebalanceSharedPoints(DM dm, PetscInt entityDepth, PetscBool useInitialGuess, PetscBool parallel, PetscBool *success)
{
#if defined(PETSC_HAVE_PARMETIS)
  PetscSF     sf;
  /* NOTE(review): ierr is declared PetscInt (not PetscErrorCode) because it also
     receives the status codes returned by the (Par)METIS calls below — confirm
     this is intentional before changing it. */
  PetscInt    ierr, i, j, idx, jdx;
  PetscInt    eBegin, eEnd, nroots, nleafs, pStart, pEnd;
  const       PetscInt *degrees, *ilocal;
  const       PetscSFNode *iremote;
  PetscBool   *toBalance, *isLeaf, *isExclusivelyOwned, *isNonExclusivelyOwned;
  PetscInt    numExclusivelyOwned, numNonExclusivelyOwned;
  PetscMPIInt rank, size;
  PetscInt    *globalNumbersOfLocalOwnedVertices, *leafGlobalNumbers;
  const       PetscInt *cumSumVertices;
  PetscInt    offset, counter;
  PetscInt    lenadjncy;
  PetscInt    *xadj, *adjncy, *vtxwgt;
  PetscInt    lenxadj;
  PetscInt    *adjwgt = NULL;
  PetscInt    *part, *options;
  PetscInt    nparts, wgtflag, numflag, ncon, edgecut;
  real_t      *ubvec;
  PetscInt    *firstVertices, *renumbering;
  PetscInt    failed, failedGlobal;
  MPI_Comm    comm;
  Mat         A, Apre;
  const char *prefix = NULL;
  PetscViewer       viewer;
  PetscViewerFormat format;
  PetscLayout layout;

  PetscFunctionBegin;
  if (success) *success = PETSC_FALSE;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  /* Nothing to rebalance on a single process. */
  if (size==1) PetscFunctionReturn(0);

  ierr = PetscLogEventBegin(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);

  ierr = PetscOptionsGetViewer(comm,((PetscObject)dm)->options, prefix,"-dm_rebalance_partition_view",&viewer,&format,NULL);CHKERRQ(ierr);
  if (viewer) {
    ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr);
  }

  /* Figure out all points in the plex that we are interested in balancing. */
  ierr = DMPlexGetDepthStratum(dm, entityDepth, &eBegin, &eEnd);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &toBalance);CHKERRQ(ierr);

  /* NOTE(review): the chart point for index i is i+pStart, so testing i-pStart
     against the stratum bounds is only equivalent when pStart == 0 — confirm. */
  for (i=0; i<pEnd-pStart; i++) {
    toBalance[i] = (PetscBool)(i-pStart>=eBegin && i-pStart<eEnd);
  }

  /* There are three types of points:
   * exclusivelyOwned: points that are owned by this process and only seen by this process
   * nonExclusivelyOwned: points that are owned by this process but seen by at least another process
   * leaf: a point that is seen by this process but owned by a different process
   */
  ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sf, &nroots, &nleafs, &ilocal, &iremote); CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &isLeaf);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &isNonExclusivelyOwned);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &isExclusivelyOwned);CHKERRQ(ierr);
  for (i=0; i<pEnd-pStart; i++) {
    isNonExclusivelyOwned[i] = PETSC_FALSE;
    isExclusivelyOwned[i] = PETSC_FALSE;
    isLeaf[i] = PETSC_FALSE;
  }

  /* start by marking all the leafs */
  /* NOTE(review): assumes ilocal is non-NULL; PetscSFGetGraph may return NULL
     ilocal for contiguous leaves — verify that cannot happen for a PointSF here. */
  for (i=0; i<nleafs; i++) {
    isLeaf[ilocal[i]-pStart] = PETSC_TRUE;
  }

  /* for an owned point, we can figure out whether another processor sees it or
   * not by calculating its degree */
  ierr = PetscSFComputeDegreeBegin(sf, &degrees);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeEnd(sf, &degrees);CHKERRQ(ierr);

  numExclusivelyOwned = 0;
  numNonExclusivelyOwned = 0;
  for (i=0; i<pEnd-pStart; i++) {
    if (toBalance[i]) {
      /* degree > 0 means at least one other process has this root as a leaf */
      if (degrees[i] > 0) {
        isNonExclusivelyOwned[i] = PETSC_TRUE;
        numNonExclusivelyOwned += 1;
      } else {
        /* degree 0 and not a leaf: only this process sees the point */
        if (!isLeaf[i]) {
          isExclusivelyOwned[i] = PETSC_TRUE;
          numExclusivelyOwned += 1;
        }
      }
    }
  }

  /* We are going to build a graph with one vertex per core representing the
   * exclusively owned points and then one vertex per nonExclusively owned
   * point. */

  /* Each rank contributes 1 "proxy" vertex (standing for all its exclusively
   * owned points) plus one vertex per non-exclusively owned point; the layout
   * ranges give the global numbering offsets per rank. */
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, 1 + numNonExclusivelyOwned);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &cumSumVertices);CHKERRQ(ierr);

  ierr = PetscMalloc1(pEnd-pStart, &globalNumbersOfLocalOwnedVertices);CHKERRQ(ierr);
  offset = cumSumVertices[rank];
  counter = 0;
  for (i=0; i<pEnd-pStart; i++) {
    if (toBalance[i]) {
      if (degrees[i] > 0) {
        /* "+ 1" skips past this rank's proxy vertex at position offset */
        globalNumbersOfLocalOwnedVertices[i] = counter + 1 + offset;
        counter++;
      }
    }
  }

  /* send the global numbers of vertices I own to the leafs so that they know to connect to it */
  ierr = PetscMalloc1(pEnd-pStart, &leafGlobalNumbers);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf, MPIU_INT, globalNumbersOfLocalOwnedVertices, leafGlobalNumbers);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf, MPIU_INT, globalNumbersOfLocalOwnedVertices, leafGlobalNumbers);CHKERRQ(ierr);

  /* Now start building the data structure for ParMETIS */

  /* First assembly pass into a MATPREALLOCATOR to compute the sparsity
   * pattern, then the same insertions are replayed into the real MPIAIJ
   * matrix A below. */
  ierr = MatCreate(comm, &Apre);CHKERRQ(ierr);
  ierr = MatSetType(Apre, MATPREALLOCATOR);CHKERRQ(ierr);
  ierr = MatSetSizes(Apre, 1+numNonExclusivelyOwned, 1+numNonExclusivelyOwned, cumSumVertices[size], cumSumVertices[size]);CHKERRQ(ierr);
  ierr = MatSetUp(Apre);CHKERRQ(ierr);

  for (i=0; i<pEnd-pStart; i++) {
    if (toBalance[i]) {
      /* connect this rank's proxy vertex to every shared point it touches,
       * symmetrically (the adjacency matrix of the auxiliary graph) */
      idx = cumSumVertices[rank];
      if (isNonExclusivelyOwned[i]) jdx = globalNumbersOfLocalOwnedVertices[i];
      else if (isLeaf[i]) jdx = leafGlobalNumbers[i];
      else continue;
      ierr = MatSetValue(Apre, idx, jdx, 1, INSERT_VALUES);CHKERRQ(ierr);
      ierr = MatSetValue(Apre, jdx, idx, 1, INSERT_VALUES);CHKERRQ(ierr);
    }
  }

  ierr = MatAssemblyBegin(Apre, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(Apre, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  ierr = MatCreate(comm, &A);CHKERRQ(ierr);
  ierr = MatSetType(A, MATMPIAIJ);CHKERRQ(ierr);
  ierr = MatSetSizes(A, 1+numNonExclusivelyOwned, 1+numNonExclusivelyOwned, cumSumVertices[size], cumSumVertices[size]);CHKERRQ(ierr);
  ierr = MatPreallocatorPreallocate(Apre, PETSC_FALSE, A);CHKERRQ(ierr);
  ierr = MatDestroy(&Apre);CHKERRQ(ierr);

  /* Second pass: the same insertions into the properly preallocated matrix. */
  for (i=0; i<pEnd-pStart; i++) {
    if (toBalance[i]) {
      idx = cumSumVertices[rank];
      if (isNonExclusivelyOwned[i]) jdx = globalNumbersOfLocalOwnedVertices[i];
      else if (isLeaf[i]) jdx = leafGlobalNumbers[i];
      else continue;
      ierr = MatSetValue(A, idx, jdx, 1, INSERT_VALUES);CHKERRQ(ierr);
      ierr = MatSetValue(A, jdx, idx, 1, INSERT_VALUES);CHKERRQ(ierr);
    }
  }

  ierr = MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = PetscFree(leafGlobalNumbers);CHKERRQ(ierr);
  ierr = PetscFree(globalNumbersOfLocalOwnedVertices);CHKERRQ(ierr);

  /* (Par)METIS parameters: wgtflag=2 means weights on vertices only,
   * numflag=0 means C-style numbering, ncon=2 means two balance constraints
   * (weight of owned points, and number of auxiliary-graph vertices). */
  nparts = size;
  wgtflag = 2;
  numflag = 0;
  ncon = 2;
  real_t *tpwgts;
  /* Uniform target partition weights for each constraint/part pair. */
  ierr = PetscMalloc1(ncon * nparts, &tpwgts);CHKERRQ(ierr);
  for (i=0; i<ncon*nparts; i++) {
    tpwgts[i] = 1./(nparts);
  }

  /* Allowed imbalance of 1% per constraint. */
  ierr = PetscMalloc1(ncon, &ubvec);CHKERRQ(ierr);
  ubvec[0] = 1.01;
  ubvec[1] = 1.01;
  /* Extract the local rows of A into CSR arrays (xadj/adjncy) for (Par)METIS. */
  lenadjncy = 0;
  for (i=0; i<1+numNonExclusivelyOwned; i++) {
    PetscInt temp=0;
    ierr = MatGetRow(A, cumSumVertices[rank] + i, &temp, NULL, NULL);CHKERRQ(ierr);
    lenadjncy += temp;
    ierr = MatRestoreRow(A, cumSumVertices[rank] + i, &temp, NULL, NULL);CHKERRQ(ierr);
  }
  ierr = PetscMalloc1(lenadjncy, &adjncy);CHKERRQ(ierr);
  lenxadj = 2 + numNonExclusivelyOwned;
  ierr = PetscMalloc1(lenxadj, &xadj);CHKERRQ(ierr);
  xadj[0] = 0;
  counter = 0;
  for (i=0; i<1+numNonExclusivelyOwned; i++) {
    PetscInt        temp=0;
    const PetscInt *cols;
    ierr = MatGetRow(A, cumSumVertices[rank] + i, &temp, &cols, NULL);CHKERRQ(ierr);
    ierr = PetscMemcpy(&adjncy[counter], cols, temp*sizeof(PetscInt));CHKERRQ(ierr);
    counter += temp;
    xadj[i+1] = counter;
    ierr = MatRestoreRow(A, cumSumVertices[rank] + i, &temp, &cols, NULL);CHKERRQ(ierr);
  }

  ierr = PetscMalloc1(cumSumVertices[rank+1]-cumSumVertices[rank], &part);CHKERRQ(ierr);
  ierr = PetscMalloc1(ncon*(1 + numNonExclusivelyOwned), &vtxwgt);CHKERRQ(ierr);
  /* The proxy vertex carries the weight of all exclusively owned points;
   * each shared point carries weight 1 in the first constraint and weight 0
   * in the second (the proxy carries 1 there, so proxies stay balanced). */
  vtxwgt[0] = numExclusivelyOwned;
  if (ncon>1) vtxwgt[1] = 1;
  for (i=0; i<numNonExclusivelyOwned; i++) {
    vtxwgt[ncon*(i+1)] = 1;
    if (ncon>1) vtxwgt[ncon*(i+1)+1] = 0;
  }

  if (viewer) {
    ierr = PetscViewerASCIIPrintf(viewer, "Attempt rebalancing of shared points of depth %D on interface of mesh distribution.\n", entityDepth);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer, "Size of generated auxiliary graph: %D\n", cumSumVertices[size]);CHKERRQ(ierr);
  }
  if (parallel) {
    ierr = PetscMalloc1(4, &options);CHKERRQ(ierr);
    options[0] = 1;
    options[1] = 0; /* Verbosity */
    options[2] = 0; /* Seed */
    options[3] = PARMETIS_PSR_COUPLED; /* Sub-domains and processors are coupled */
    if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using ParMETIS to partition graph.\n");CHKERRQ(ierr); }
    if (useInitialGuess) {
      /* Refine the current distribution instead of partitioning from scratch. */
      if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using current distribution of points as initial guess.\n");CHKERRQ(ierr); }
      PetscStackPush("ParMETIS_V3_RefineKway");
      ierr = ParMETIS_V3_RefineKway((PetscInt*)cumSumVertices, xadj, adjncy, vtxwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgecut, part, &comm);
      if (ierr != METIS_OK) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_LIB, "Error in ParMETIS_V3_RefineKway()");
      PetscStackPop;
    } else {
      PetscStackPush("ParMETIS_V3_PartKway");
      ierr = ParMETIS_V3_PartKway((PetscInt*)cumSumVertices, xadj, adjncy, vtxwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgecut, part, &comm);
      PetscStackPop;
      if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
    }
    ierr = PetscFree(options);CHKERRQ(ierr);
  } else {
    /* Serial path: gather the full graph on every rank, partition on rank 0
     * with plain METIS, then scatter the result. */
    if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using METIS to partition graph.\n");CHKERRQ(ierr); }
    Mat As;
    PetscInt numRows;
    PetscInt *partGlobal;
    ierr = MatCreateRedundantMatrix(A, size, MPI_COMM_NULL, MAT_INITIAL_MATRIX, &As);CHKERRQ(ierr);

    PetscInt *numExclusivelyOwnedAll;
    ierr = PetscMalloc1(size, &numExclusivelyOwnedAll);CHKERRQ(ierr);
    numExclusivelyOwnedAll[rank] = numExclusivelyOwned;
    ierr = MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,numExclusivelyOwnedAll,1,MPIU_INT,comm);CHKERRQ(ierr);

    ierr = MatGetSize(As, &numRows, NULL);CHKERRQ(ierr);
    ierr = PetscMalloc1(numRows, &partGlobal);CHKERRQ(ierr);
    if (!rank) {
      PetscInt *adjncy_g, *xadj_g, *vtxwgt_g;
      lenadjncy = 0;

      /* Build the global CSR arrays from the redundant matrix. */
      for (i=0; i<numRows; i++) {
        PetscInt temp=0;
        ierr = MatGetRow(As, i, &temp, NULL, NULL);CHKERRQ(ierr);
        lenadjncy += temp;
        ierr = MatRestoreRow(As, i, &temp, NULL, NULL);CHKERRQ(ierr);
      }
      ierr = PetscMalloc1(lenadjncy, &adjncy_g);CHKERRQ(ierr);
      lenxadj = 1 + numRows;
      ierr = PetscMalloc1(lenxadj, &xadj_g);CHKERRQ(ierr);
      xadj_g[0] = 0;
      counter = 0;
      for (i=0; i<numRows; i++) {
        PetscInt        temp=0;
        const PetscInt *cols;
        ierr = MatGetRow(As, i, &temp, &cols, NULL);CHKERRQ(ierr);
        ierr = PetscMemcpy(&adjncy_g[counter], cols, temp*sizeof(PetscInt));CHKERRQ(ierr);
        counter += temp;
        xadj_g[i+1] = counter;
        ierr = MatRestoreRow(As, i, &temp, &cols, NULL);CHKERRQ(ierr);
      }
      /* NOTE(review): allocation uses the literal 2 while indexing uses ncon;
       * these agree only because ncon == 2 above — keep them in sync. */
      ierr = PetscMalloc1(2*numRows, &vtxwgt_g);CHKERRQ(ierr);
      for (i=0; i<size; i++){
        /* Same weighting scheme as the parallel path, per rank's vertex range. */
        vtxwgt_g[ncon*cumSumVertices[i]] = numExclusivelyOwnedAll[i];
        if (ncon>1) vtxwgt_g[ncon*cumSumVertices[i]+1] = 1;
        for (j=cumSumVertices[i]+1; j<cumSumVertices[i+1]; j++) {
          vtxwgt_g[ncon*j] = 1;
          if (ncon>1) vtxwgt_g[2*j+1] = 0;
        }
      }
      /* METIS_NOPTIONS is 40; 64 leaves headroom. */
      ierr = PetscMalloc1(64, &options);CHKERRQ(ierr);
      ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
      if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
      options[METIS_OPTION_CONTIG] = 1;
      PetscStackPush("METIS_PartGraphKway");
      ierr = METIS_PartGraphKway(&numRows, &ncon, xadj_g, adjncy_g, vtxwgt_g, NULL, NULL, &nparts, tpwgts, ubvec, options, &edgecut, partGlobal);
      PetscStackPop;
      if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
      ierr = PetscFree(options);CHKERRQ(ierr);
      ierr = PetscFree(xadj_g);CHKERRQ(ierr);
      ierr = PetscFree(adjncy_g);CHKERRQ(ierr);
      ierr = PetscFree(vtxwgt_g);CHKERRQ(ierr);
    }
    ierr = PetscFree(numExclusivelyOwnedAll);CHKERRQ(ierr);

    /* Now scatter the parts array. */
    {
      PetscMPIInt *counts, *mpiCumSumVertices;
      ierr = PetscMalloc1(size, &counts);CHKERRQ(ierr);
      ierr = PetscMalloc1(size+1, &mpiCumSumVertices);CHKERRQ(ierr);
      for(i=0; i<size; i++) {
        ierr = PetscMPIIntCast(cumSumVertices[i+1] - cumSumVertices[i], &(counts[i]));CHKERRQ(ierr);
      }
      for(i=0; i<=size; i++) {
        ierr = PetscMPIIntCast(cumSumVertices[i], &(mpiCumSumVertices[i]));CHKERRQ(ierr);
      }
      ierr = MPI_Scatterv(partGlobal, counts, mpiCumSumVertices, MPIU_INT, part, counts[rank], MPIU_INT, 0, comm);CHKERRQ(ierr);
      ierr = PetscFree(counts);CHKERRQ(ierr);
      ierr = PetscFree(mpiCumSumVertices);CHKERRQ(ierr);
    }

    ierr = PetscFree(partGlobal);CHKERRQ(ierr);
    ierr = MatDestroy(&As);CHKERRQ(ierr);
  }

  ierr = MatDestroy(&A);CHKERRQ(ierr);
  ierr = PetscFree(ubvec);CHKERRQ(ierr);
  ierr = PetscFree(tpwgts);CHKERRQ(ierr);

  /* Now rename the result so that the vertex resembling the exclusively owned points stays on the same rank */

  ierr = PetscMalloc1(size, &firstVertices);CHKERRQ(ierr);
  ierr = PetscMalloc1(size, &renumbering);CHKERRQ(ierr);
  /* part[0] is the partition label assigned to this rank's proxy vertex;
   * build the permutation that maps that label back to this rank. */
  firstVertices[rank] = part[0];
  ierr = MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,firstVertices,1,MPIU_INT,comm);CHKERRQ(ierr);
  for (i=0; i<size; i++) {
    renumbering[firstVertices[i]] = i;
  }
  for (i=0; i<cumSumVertices[rank+1]-cumSumVertices[rank]; i++) {
    part[i] = renumbering[part[i]];
  }
  /* Check if the renumbering worked (this can fail when ParMETIS gives fewer partitions than there are processes) */
  failed = (PetscInt)(part[0] != rank);
  ierr = MPI_Allreduce(&failed, &failedGlobal, 1, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);

  ierr = PetscFree(firstVertices);CHKERRQ(ierr);
  ierr = PetscFree(renumbering);CHKERRQ(ierr);

  if (failedGlobal > 0) {
    /* Bail out without touching the SF; *success stays PETSC_FALSE. */
    ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
    ierr = PetscFree(xadj);CHKERRQ(ierr);
    ierr = PetscFree(adjncy);CHKERRQ(ierr);
    ierr = PetscFree(vtxwgt);CHKERRQ(ierr);
    ierr = PetscFree(toBalance);CHKERRQ(ierr);
    ierr = PetscFree(isLeaf);CHKERRQ(ierr);
    ierr = PetscFree(isNonExclusivelyOwned);CHKERRQ(ierr);
    ierr = PetscFree(isExclusivelyOwned);CHKERRQ(ierr);
    ierr = PetscFree(part);CHKERRQ(ierr);
    if (viewer) {
      ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
      ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
    }
    ierr = PetscLogEventEnd(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /*Let's check how well we did distributing points*/
  if (viewer) {
    ierr = PetscViewerASCIIPrintf(viewer, "Comparing number of owned entities of depth %D on each process before rebalancing, after rebalancing, and after consistency checks.\n", entityDepth);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer, "Initial.     ");CHKERRQ(ierr);
    ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, NULL, viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer, "Rebalanced.  ");CHKERRQ(ierr);
    ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, part, viewer);CHKERRQ(ierr);
  }

  /* Now check that every vertex is owned by a process that it is actually connected to. */
  /* NOTE(review): PetscFindInt does a binary search, so this relies on each
   * adjacency row being sorted (as returned by MatGetRow) — confirm. */
  for (i=1; i<=numNonExclusivelyOwned; i++) {
    PetscInt loc = 0;
    ierr = PetscFindInt(cumSumVertices[part[i]], xadj[i+1]-xadj[i], &adjncy[xadj[i]], &loc);CHKERRQ(ierr);
    /* If not, then just set the owner to the original owner (hopefully a rare event, it means that a vertex has been isolated) */
    if (loc<0) {
      part[i] = rank;
    }
  }

  /* Let's see how significant the influences of the previous fixing up step was.*/
  if (viewer) {
    ierr = PetscViewerASCIIPrintf(viewer, "After.       ");CHKERRQ(ierr);
    ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, part, viewer);CHKERRQ(ierr);
  }

  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  ierr = PetscFree(xadj);CHKERRQ(ierr);
  ierr = PetscFree(adjncy);CHKERRQ(ierr);
  ierr = PetscFree(vtxwgt);CHKERRQ(ierr);

  /* Almost done, now rewrite the SF to reflect the new ownership. */
  {
    PetscInt *pointsToRewrite;
    ierr = PetscMalloc1(numNonExclusivelyOwned, &pointsToRewrite);CHKERRQ(ierr);
    counter = 0;
    for(i=0; i<pEnd-pStart; i++) {
      if (toBalance[i]) {
        if (isNonExclusivelyOwned[i]) {
          pointsToRewrite[counter] = i + pStart;
          counter++;
        }
      }
    }
    /* part+1 skips the proxy vertex so part entries align with pointsToRewrite */
    ierr = DMPlexRewriteSF(dm, numNonExclusivelyOwned, pointsToRewrite, part+1, degrees);CHKERRQ(ierr);
    ierr = PetscFree(pointsToRewrite);CHKERRQ(ierr);
  }

  ierr = PetscFree(toBalance);CHKERRQ(ierr);
  ierr = PetscFree(isLeaf);CHKERRQ(ierr);
  ierr = PetscFree(isNonExclusivelyOwned);CHKERRQ(ierr);
  ierr = PetscFree(isExclusivelyOwned);CHKERRQ(ierr);
  ierr = PetscFree(part);CHKERRQ(ierr);
  if (viewer) {
    ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
    ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  }
  if (success) *success = PETSC_TRUE;
  ierr = PetscLogEventEnd(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
#else
  SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
#endif
  PetscFunctionReturn(0);
}
3384