xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 9d459c0e18b28d0079217bc50396e6f508b44c23)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/hashseti.h>
3 
/* Class id used by the PetscValidHeaderSpecific() checks on PetscPartitioner objects below;
   presumably assigned during package registration -- the registration site is not in this file */
PetscClassId PETSCPARTITIONER_CLASSID = 0;

/* Registry of partitioner constructors, filled by PetscPartitionerRegister() and
   queried by PetscPartitionerSetType(); the flag guards one-time registration
   (NOTE(review): the flag is not referenced in this chunk -- presumably used by
   PetscPartitionerRegisterAll(), confirm) */
PetscFunctionList PetscPartitionerList              = NULL;
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
8 
9 PetscBool ChacoPartitionercite = PETSC_FALSE;
10 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
11                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
12                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
13                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
14                                "  isbn      = {0-89791-816-9},\n"
15                                "  pages     = {28},\n"
16                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
17                                "  publisher = {ACM Press},\n"
18                                "  address   = {New York},\n"
19                                "  year      = {1995}\n}\n";
20 
/* BibTeX citation for the ParMetis partitioner; the PetscBool presumably guards one-time
   registration (e.g. via PetscCitationsRegister()) -- the registration site is not in this chunk */
PetscBool ParMetisPartitionercite = PETSC_FALSE;
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";
29 
30 PETSC_STATIC_INLINE PetscInt DMPlex_GlobalID(PetscInt point) { return point >= 0 ? point : -(point+1); }
31 
/* Build the cell-cell CSR adjacency graph for the partitioner directly from the Plex,
   including edges to cells owned by other ranks, which are discovered by exchanging
   global cell ids over the point SF.  Outputs (numVertices, offsets, adjacency,
   globalNumbering) follow the contract documented on DMPlexCreatePartitionerGraph(). */
static PetscErrorCode DMPlexCreatePartitionerGraph_Native(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
{
  PetscInt       dim, depth, p, pStart, pEnd, a, adjSize, idx, size;
  PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
  IS             cellNumbering;
  const PetscInt *cellNum;
  PetscBool      useCone, useClosure;
  PetscSection   section;
  PetscSegBuffer adjBuffer;
  PetscSF        sfPoint;
  PetscInt       *adjCells = NULL, *remoteCells = NULL;
  const PetscInt *local;
  PetscInt       nroots, nleaves, l;
  PetscMPIInt    rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  if (dim != depth) {
    /* We do not handle the uninterpolated case here */
    ierr = DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);CHKERRQ(ierr);
    /* DMPlexCreateNeighborCSR does not make a numbering */
    if (globalNumbering) {ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);CHKERRQ(ierr);}
    /* Different behavior for empty graphs */
    if (!*numVertices) {
      ierr = PetscMalloc1(1, offsets);CHKERRQ(ierr);
      (*offsets)[0] = 0;
    }
    /* Broken in parallel */
    if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported");
    PetscFunctionReturn(0);
  }
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
  /* Build adjacency graph via a section/segbuffer: the section counts edges per cell,
     the segbuffer accumulates the concatenated edge lists */
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
  /* Always use FVM adjacency to create partitioner graph; the user's setting is restored below */
  ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
  ierr = DMSetBasicAdjacency(dm, PETSC_TRUE, PETSC_FALSE);CHKERRQ(ierr);
  ierr = DMPlexCreateNumbering_Internal(dm, pStart, pEnd, 0, NULL, sfPoint, &cellNumbering);CHKERRQ(ierr);
  if (globalNumbering) {
    /* Hand the caller its own reference; destroyed by the caller */
    ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
    *globalNumbering = cellNumbering;
  }
  ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  /* For all boundary faces (including faces adjacent to a ghost cell), record the local cell in adjCells
     Broadcast adjCells to remoteCells (to get cells from roots) and Reduce adjCells to remoteCells (to get cells from leaves)
   */
  ierr = PetscSFGetGraph(sfPoint, &nroots, &nleaves, &local, NULL);CHKERRQ(ierr);
  if (nroots >= 0) {
    PetscInt fStart, fEnd, f;

    /* adjCells[f] holds the global id of a cell seen from face f; -3 marks "no cell recorded" */
    ierr = PetscCalloc2(nroots, &adjCells, nroots, &remoteCells);CHKERRQ(ierr);
    ierr = DMPlexGetHeightStratum(dm, height+1, &fStart, &fEnd);CHKERRQ(ierr);
    for (l = 0; l < nroots; ++l) adjCells[l] = -3;
    for (f = fStart; f < fEnd; ++f) {
      const PetscInt *support;
      PetscInt        supportSize;

      ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
      ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
      if (supportSize == 1) adjCells[f] = DMPlex_GlobalID(cellNum[support[0]]);
      else if (supportSize == 2) {
        /* If one side of the face is a leaf (shared) point, record the global id of the other side */
        ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = DMPlex_GlobalID(cellNum[support[1]]);
        ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = DMPlex_GlobalID(cellNum[support[0]]);
      }
      /* Handle non-conforming meshes: record cells seen from the face's tree children as well */
      if (supportSize > 2) {
        PetscInt        numChildren, i;
        const PetscInt *children;

        ierr = DMPlexGetTreeChildren(dm, f, &numChildren, &children);CHKERRQ(ierr);
        for (i = 0; i < numChildren; ++i) {
          const PetscInt child = children[i];
          if (fStart <= child && child < fEnd) {
            ierr = DMPlexGetSupport(dm, child, &support);CHKERRQ(ierr);
            ierr = DMPlexGetSupportSize(dm, child, &supportSize);CHKERRQ(ierr);
            if (supportSize == 1) adjCells[child] = DMPlex_GlobalID(cellNum[support[0]]);
            else if (supportSize == 2) {
              ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr);
              if (p >= 0) adjCells[child] = DMPlex_GlobalID(cellNum[support[1]]);
              ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr);
              if (p >= 0) adjCells[child] = DMPlex_GlobalID(cellNum[support[0]]);
            }
          }
        }
      }
    }
    /* remoteCells[f] ends up with the neighboring cell id from the other side of the SF;
       MPI_MAX keeps the valid (>= 0) id over the -1 initializer and the -3 sentinel.
       NOTE(review): these calls use dm->sf directly while sfPoint came from DMGetPointSF()
       above -- presumably the same SF; confirm. */
    for (l = 0; l < nroots; ++l) remoteCells[l] = -1;
    ierr = PetscSFBcastBegin(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
  }
  /* Combine local and global adjacencies */
  for (*numVertices = 0, p = pStart; p < pEnd; p++) {
    /* Skip non-owned cells in parallel (ParMetis expects no overlap)
       NOTE(review): this uses nroots > 0 while the exchange above is guarded by
       nroots >= 0 -- confirm the nroots == 0 case is intentional */
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    /* Add remote cells */
    if (remoteCells) {
      const PetscInt gp = DMPlex_GlobalID(cellNum[p]);
      PetscInt       coneSize, numChildren, c, i;
      const PetscInt *cone, *children;

      ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
      ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
      for (c = 0; c < coneSize; ++c) {
        const PetscInt point = cone[c];
        /* A face carrying a valid remote id different from this cell's id contributes an edge */
        if (remoteCells[point] >= 0 && remoteCells[point] != gp) {
          PetscInt *PETSC_RESTRICT pBuf;
          ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
          ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
          *pBuf = remoteCells[point];
        }
        /* Handle non-conforming meshes */
        ierr = DMPlexGetTreeChildren(dm, point, &numChildren, &children);CHKERRQ(ierr);
        for (i = 0; i < numChildren; ++i) {
          const PetscInt child = children[i];
          if (remoteCells[child] >= 0 && remoteCells[child] != gp) {
            PetscInt *PETSC_RESTRICT pBuf;
            ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
            ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
            *pBuf = remoteCells[child];
          }
        }
      }
    }
    /* Add local cells */
    adjSize = PETSC_DETERMINE;
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      const PetscInt point = adj[a];
      /* Keep only other cells in this stratum; store their global ids */
      if (point != p && pStart <= point && point < pEnd) {
        PetscInt *PETSC_RESTRICT pBuf;
        ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
        ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
        *pBuf = DMPlex_GlobalID(cellNum[point]);
      }
    }
    (*numVertices)++;
  }
  ierr = PetscFree(adj);CHKERRQ(ierr);
  ierr = PetscFree2(adjCells, remoteCells);CHKERRQ(ierr);
  /* Restore the adjacency setting saved above */
  ierr = DMSetBasicAdjacency(dm, useCone, useClosure);CHKERRQ(ierr);

  /* Derive CSR graph from section/segbuffer */
  ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
  for (idx = 0, p = pStart; p < pEnd; p++) {
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
  }
  vOffsets[*numVertices] = size;
  ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);

  if (nroots >= 0) {
    /* Filter out duplicate edges using section/segbuffer: sort each row in place,
       drop duplicates, and rebuild the CSR arrays from the compacted rows */
    ierr = PetscSectionReset(section);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(section, 0, *numVertices);CHKERRQ(ierr);
    for (p = 0; p < *numVertices; p++) {
      PetscInt start = vOffsets[p], end = vOffsets[p+1];
      PetscInt numEdges = end-start, *PETSC_RESTRICT edges;
      ierr = PetscSortRemoveDupsInt(&numEdges, &graph[start]);CHKERRQ(ierr);
      ierr = PetscSectionSetDof(section, p, numEdges);CHKERRQ(ierr);
      ierr = PetscSegBufferGetInts(adjBuffer, numEdges, &edges);CHKERRQ(ierr);
      ierr = PetscMemcpy(edges, &graph[start], numEdges*sizeof(*edges));CHKERRQ(ierr);
    }
    ierr = PetscFree(vOffsets);CHKERRQ(ierr);
    ierr = PetscFree(graph);CHKERRQ(ierr);
    /* Derive CSR graph from section/segbuffer */
    ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
    ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
    ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
    for (idx = 0, p = 0; p < *numVertices; p++) {
      ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
    }
    vOffsets[*numVertices] = size;
    ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
  } else {
    /* Sort adjacencies (not strictly necessary) */
    for (p = 0; p < *numVertices; p++) {
      PetscInt start = vOffsets[p], end = vOffsets[p+1];
      ierr = PetscSortInt(end-start, &graph[start]);CHKERRQ(ierr);
    }
  }

  /* Ownership of vOffsets/graph transfers to the caller (freed with PetscFree) */
  if (offsets) *offsets = vOffsets;
  if (adjacency) *adjacency = graph;

  /* Cleanup */
  ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
  ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
235 
/* Build the cell-cell CSR adjacency graph for the partitioner by assembling a
   parallel face-cell incidence matrix conn and computing conn^T * conn; two cells
   are adjacent iff they share a face.  Self-connections (the diagonal) are removed
   from the returned CSR, since ParMetis does not accept them. */
static PetscErrorCode DMPlexCreatePartitionerGraph_ViaMat(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
{
  Mat            conn, CSR;
  IS             fis, cis, cis_own;
  PetscSF        sfPoint;
  const char     *prefix;
  const PetscInt *rows, *cols, *ii, *jj;
  PetscInt       *idxs,*idxs2;
  PetscInt       dim, depth, floc, cloc, i, M, N, c, m, cStart, cEnd, fStart, fEnd;
  PetscMPIInt    rank;
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  if (dim != depth) {
    /* We do not handle the uninterpolated case here */
    ierr = DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);CHKERRQ(ierr);
    /* DMPlexCreateNeighborCSR does not make a numbering */
    if (globalNumbering) {ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);CHKERRQ(ierr);}
    /* Different behavior for empty graphs */
    if (!*numVertices) {
      ierr = PetscMalloc1(1, offsets);CHKERRQ(ierr);
      (*offsets)[0] = 0;
    }
    /* Broken in parallel */
    if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported");
    PetscFunctionReturn(0);
  }
  /* Interpolated and parallel case */
  ierr = DMGetOptionsPrefix(dm, &prefix);CHKERRQ(ierr);

  /* numbering: cis numbers cells globally (N total), fis numbers faces globally (M total);
     negative entries encode non-owned points as -(gid+1) */
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &cStart, &cEnd);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height+1, &fStart, &fEnd);CHKERRQ(ierr);
  ierr = DMPlexCreateNumbering_Internal(dm, cStart, cEnd, 0, &N, sfPoint, &cis);CHKERRQ(ierr);
  ierr = DMPlexCreateNumbering_Internal(dm, fStart, fEnd, 0, &M, sfPoint, &fis);CHKERRQ(ierr);
  if (globalNumbering) {
    /* Caller owns this duplicate and must destroy it */
    ierr = ISDuplicate(cis, globalNumbering);CHKERRQ(ierr);
  }

  /* get positive global ids and local sizes for facets and cells */
  ierr = ISGetLocalSize(fis, &m);CHKERRQ(ierr);
  ierr = ISGetIndices(fis, &rows);CHKERRQ(ierr);
  ierr = PetscMalloc1(m, &idxs);CHKERRQ(ierr);
  for (i = 0, floc = 0; i < m; i++) {
    const PetscInt p = rows[i];

    if (p < 0) {
      /* decode the ghost encoding -(gid+1) back to the global id */
      idxs[i] = -(p+1);
    } else {
      idxs[i] = p;
      floc   += 1; /* count locally owned facets */
    }
  }
  ierr = ISRestoreIndices(fis, &rows);CHKERRQ(ierr);
  ierr = ISDestroy(&fis);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF, m, idxs, PETSC_OWN_POINTER, &fis);CHKERRQ(ierr);

  /* same for cells; idxs2 additionally gathers the owned cells' global ids */
  ierr = ISGetLocalSize(cis, &m);CHKERRQ(ierr);
  ierr = ISGetIndices(cis, &cols);CHKERRQ(ierr);
  ierr = PetscMalloc1(m, &idxs);CHKERRQ(ierr);
  ierr = PetscMalloc1(m, &idxs2);CHKERRQ(ierr);
  for (i = 0, cloc = 0; i < m; i++) {
    const PetscInt p = cols[i];

    if (p < 0) {
      idxs[i] = -(p+1);
    } else {
      idxs[i]       = p;
      idxs2[cloc++] = p;
    }
  }
  ierr = ISRestoreIndices(cis, &cols);CHKERRQ(ierr);
  ierr = ISDestroy(&cis);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), m, idxs, PETSC_OWN_POINTER, &cis);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), cloc, idxs2, PETSC_OWN_POINTER, &cis_own);CHKERRQ(ierr);

  ierr = PetscObjectSetName((PetscObject)cis, "Local cells");CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject)cis_own, "Owned local cells");CHKERRQ(ierr);
  ierr = PetscObjectSetOptionsPrefix((PetscObject)cis, prefix);CHKERRQ(ierr);
  ierr = PetscObjectSetOptionsPrefix((PetscObject)cis_own, prefix);CHKERRQ(ierr);
  ierr = ISViewFromOptions(cis, NULL, "-dm_plex_csr_is_view");CHKERRQ(ierr);
  ierr = ISViewFromOptions(cis_own, NULL, "-dm_plex_csr_is_view");CHKERRQ(ierr);

  /* Create matrix to hold F-C connectivity (MatMatTranspose Mult not supported for MPIAIJ) */
  ierr = MatCreate(PetscObjectComm((PetscObject)dm), &conn);CHKERRQ(ierr);
  ierr = MatSetSizes(conn, floc, cloc, M, N);CHKERRQ(ierr);
  ierr = MatSetType(conn, MATMPIAIJ);CHKERRQ(ierr);
  /* m is presumably the maximum support size, i.e. cells per face -- confirm against
     DMPlexGetMaxSizes(); used to preallocate both diagonal and off-diagonal blocks */
  ierr = DMPlexGetMaxSizes(dm, NULL, &m);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(conn, m, NULL, m, NULL);CHKERRQ(ierr);

  /* Assemble matrix: entry (face, cell) = 1 whenever face is in cell's cone */
  ierr = ISGetIndices(fis, &rows);CHKERRQ(ierr);
  ierr = ISGetIndices(cis, &cols);CHKERRQ(ierr);
  for (c = cStart; c < cEnd; c++) {
    const PetscInt *cone;
    PetscInt        coneSize, row, col, f;

    col  = cols[c-cStart];
    ierr = DMPlexGetCone(dm, c, &cone);CHKERRQ(ierr);
    ierr = DMPlexGetConeSize(dm, c, &coneSize);CHKERRQ(ierr);
    for (f = 0; f < coneSize; f++) {
      const PetscScalar v = 1.0;
      const PetscInt *children;
      PetscInt        numChildren, ch;

      row  = rows[cone[f]-fStart];
      ierr = MatSetValues(conn, 1, &row, 1, &col, &v, INSERT_VALUES);CHKERRQ(ierr);

      /* non-conforming meshes: a cell is also connected through its faces' tree children */
      ierr = DMPlexGetTreeChildren(dm, cone[f], &numChildren, &children);CHKERRQ(ierr);
      for (ch = 0; ch < numChildren; ch++) {
        const PetscInt child = children[ch];

        if (child < fStart || child >= fEnd) continue;
        row  = rows[child-fStart];
        ierr = MatSetValues(conn, 1, &row, 1, &col, &v, INSERT_VALUES);CHKERRQ(ierr);
      }
    }
  }
  ierr = ISRestoreIndices(fis, &rows);CHKERRQ(ierr);
  ierr = ISRestoreIndices(cis, &cols);CHKERRQ(ierr);
  ierr = ISDestroy(&fis);CHKERRQ(ierr);
  ierr = ISDestroy(&cis);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(conn, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(conn, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  ierr = PetscObjectSetName((PetscObject)conn, "F-C connectivity");CHKERRQ(ierr);
  ierr = MatSetOptionsPrefix(conn, prefix);CHKERRQ(ierr);
  ierr = MatViewFromOptions(conn, NULL, "-dm_plex_csr_conn_view");CHKERRQ(ierr);

  /* Get parallel CSR by doing conn^T * conn */
  ierr = MatTransposeMatMult(conn, conn, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &CSR);CHKERRQ(ierr);
  ierr = MatDestroy(&conn);CHKERRQ(ierr);

  ierr = PetscObjectSetName((PetscObject)CSR, "Connectivity");CHKERRQ(ierr);
  ierr = MatSetOptionsPrefix(CSR, prefix);CHKERRQ(ierr);
  ierr = DMViewFromOptions(dm, NULL, "-dm_plex_csr_dm_view");CHKERRQ(ierr);
  ierr = MatViewFromOptions(CSR, NULL, "-dm_plex_csr_mat_view");CHKERRQ(ierr);

  /* extract local part of the CSR */
  ierr = MatMPIAIJGetLocalMat(CSR, MAT_INITIAL_MATRIX, &conn);CHKERRQ(ierr);
  ierr = MatDestroy(&CSR);CHKERRQ(ierr);
  ierr = MatGetRowIJ(conn, 0, PETSC_FALSE, PETSC_FALSE, &m, &ii, &jj, &flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "No IJ format");

  /* get back requested output; offsets/adjacency ownership transfers to the caller */
  if (numVertices) *numVertices = m;
  if (offsets) {
    ierr = PetscCalloc1(m+1, &idxs);CHKERRQ(ierr);
    for (i = 1; i < m+1; i++) idxs[i] = ii[i] - i; /* ParMetis does not like self-connectivity */
    *offsets = idxs;
  }
  if (adjacency) {
    /* copy column indices, skipping each row's own global id (the diagonal) */
    ierr = PetscMalloc1(ii[m] - m, &idxs);CHKERRQ(ierr);
    ierr = ISGetIndices(cis_own, &rows);CHKERRQ(ierr);
    for (i = 0, c = 0; i < m; i++) {
      PetscInt j, g = rows[i];

      for (j = ii[i]; j < ii[i+1]; j++) {
        if (jj[j] == g) continue; /* again, self-connectivity */
        idxs[c++] = jj[j];
      }
    }
    /* sanity check: exactly one diagonal entry per row must have been dropped */
    if (c != ii[m] - m) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected %D != %D",c,ii[m]-m);
    ierr = ISRestoreIndices(cis_own, &rows);CHKERRQ(ierr);
    *adjacency = idxs;
  }

  /* cleanup */
  ierr = ISDestroy(&cis_own);CHKERRQ(ierr);
  ierr = MatRestoreRowIJ(conn, 0, PETSC_FALSE, PETSC_FALSE, &m, &ii, &jj, &flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "No IJ format");
  ierr = MatDestroy(&conn);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
416 
417 /*@C
418   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
419 
420   Input Parameters:
421 + dm      - The mesh DM dm
422 - height  - Height of the strata from which to construct the graph
423 
  Output Parameters:
425 + numVertices     - Number of vertices in the graph
426 . offsets         - Point offsets in the graph
427 . adjacency       - Point connectivity in the graph
428 - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.
429 
  Notes:
  The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
  representation on the mesh. If requested, globalNumbering needs to be destroyed by the caller; offsets and adjacency need to be freed with PetscFree().
432 
433   Level: developer
434 
435 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMSetAdjacency()
436 @*/
437 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
438 {
439   PetscErrorCode ierr;
440   PetscBool      usemat = PETSC_FALSE;
441 
442   PetscFunctionBegin;
443   ierr = PetscOptionsGetBool(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-dm_plex_csr_via_mat", &usemat, NULL);CHKERRQ(ierr);
444   if (usemat) {
445     ierr = DMPlexCreatePartitionerGraph_ViaMat(dm, height, numVertices, offsets, adjacency, globalNumbering);CHKERRQ(ierr);
446   } else {
447     ierr = DMPlexCreatePartitionerGraph_Native(dm, height, numVertices, offsets, adjacency, globalNumbering);CHKERRQ(ierr);
448   }
449   PetscFunctionReturn(0);
450 }
451 
452 /*@C
453   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
454 
455   Collective
456 
457   Input Arguments:
458 + dm - The DMPlex
459 - cellHeight - The height of mesh points to treat as cells (default should be 0)
460 
461   Output Arguments:
462 + numVertices - The number of local vertices in the graph, or cells in the mesh.
463 . offsets     - The offset to the adjacency list for each cell
464 - adjacency   - The adjacency list for all cells
465 
466   Note: This is suitable for input to a mesh partitioner like ParMetis.
467 
468   Level: advanced
469 
470 .seealso: DMPlexCreate()
471 @*/
472 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
473 {
474   const PetscInt maxFaceCases = 30;
475   PetscInt       numFaceCases = 0;
476   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
477   PetscInt      *off, *adj;
478   PetscInt      *neighborCells = NULL;
479   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
480   PetscErrorCode ierr;
481 
482   PetscFunctionBegin;
483   /* For parallel partitioning, I think you have to communicate supports */
484   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
485   cellDim = dim - cellHeight;
486   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
487   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
488   if (cEnd - cStart == 0) {
489     if (numVertices) *numVertices = 0;
490     if (offsets)   *offsets   = NULL;
491     if (adjacency) *adjacency = NULL;
492     PetscFunctionReturn(0);
493   }
494   numCells  = cEnd - cStart;
495   faceDepth = depth - cellHeight;
496   if (dim == depth) {
497     PetscInt f, fStart, fEnd;
498 
499     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
500     /* Count neighboring cells */
501     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
502     for (f = fStart; f < fEnd; ++f) {
503       const PetscInt *support;
504       PetscInt        supportSize;
505       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
506       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
507       if (supportSize == 2) {
508         PetscInt numChildren;
509 
510         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
511         if (!numChildren) {
512           ++off[support[0]-cStart+1];
513           ++off[support[1]-cStart+1];
514         }
515       }
516     }
517     /* Prefix sum */
518     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
519     if (adjacency) {
520       PetscInt *tmp;
521 
522       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
523       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
524       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
525       /* Get neighboring cells */
526       for (f = fStart; f < fEnd; ++f) {
527         const PetscInt *support;
528         PetscInt        supportSize;
529         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
530         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
531         if (supportSize == 2) {
532           PetscInt numChildren;
533 
534           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
535           if (!numChildren) {
536             adj[tmp[support[0]-cStart]++] = support[1];
537             adj[tmp[support[1]-cStart]++] = support[0];
538           }
539         }
540       }
541 #if defined(PETSC_USE_DEBUG)
542       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
543 #endif
544       ierr = PetscFree(tmp);CHKERRQ(ierr);
545     }
546     if (numVertices) *numVertices = numCells;
547     if (offsets)   *offsets   = off;
548     if (adjacency) *adjacency = adj;
549     PetscFunctionReturn(0);
550   }
551   /* Setup face recognition */
552   if (faceDepth == 1) {
553     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
554 
555     for (c = cStart; c < cEnd; ++c) {
556       PetscInt corners;
557 
558       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
559       if (!cornersSeen[corners]) {
560         PetscInt nFV;
561 
562         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
563         cornersSeen[corners] = 1;
564 
565         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
566 
567         numFaceVertices[numFaceCases++] = nFV;
568       }
569     }
570   }
571   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
572   /* Count neighboring cells */
573   for (cell = cStart; cell < cEnd; ++cell) {
574     PetscInt numNeighbors = PETSC_DETERMINE, n;
575 
576     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
577     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
578     for (n = 0; n < numNeighbors; ++n) {
579       PetscInt        cellPair[2];
580       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
581       PetscInt        meetSize = 0;
582       const PetscInt *meet    = NULL;
583 
584       cellPair[0] = cell; cellPair[1] = neighborCells[n];
585       if (cellPair[0] == cellPair[1]) continue;
586       if (!found) {
587         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
588         if (meetSize) {
589           PetscInt f;
590 
591           for (f = 0; f < numFaceCases; ++f) {
592             if (numFaceVertices[f] == meetSize) {
593               found = PETSC_TRUE;
594               break;
595             }
596           }
597         }
598         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
599       }
600       if (found) ++off[cell-cStart+1];
601     }
602   }
603   /* Prefix sum */
604   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
605 
606   if (adjacency) {
607     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
608     /* Get neighboring cells */
609     for (cell = cStart; cell < cEnd; ++cell) {
610       PetscInt numNeighbors = PETSC_DETERMINE, n;
611       PetscInt cellOffset   = 0;
612 
613       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
614       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
615       for (n = 0; n < numNeighbors; ++n) {
616         PetscInt        cellPair[2];
617         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
618         PetscInt        meetSize = 0;
619         const PetscInt *meet    = NULL;
620 
621         cellPair[0] = cell; cellPair[1] = neighborCells[n];
622         if (cellPair[0] == cellPair[1]) continue;
623         if (!found) {
624           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
625           if (meetSize) {
626             PetscInt f;
627 
628             for (f = 0; f < numFaceCases; ++f) {
629               if (numFaceVertices[f] == meetSize) {
630                 found = PETSC_TRUE;
631                 break;
632               }
633             }
634           }
635           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
636         }
637         if (found) {
638           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
639           ++cellOffset;
640         }
641       }
642     }
643   }
644   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
645   if (numVertices) *numVertices = numCells;
646   if (offsets)   *offsets   = off;
647   if (adjacency) *adjacency = adj;
648   PetscFunctionReturn(0);
649 }
650 
651 /*@C
652   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
653 
654   Not Collective
655 
656   Input Parameters:
657 + name        - The name of a new user-defined creation routine
658 - create_func - The creation routine itself
659 
660   Notes:
661   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
662 
663   Sample usage:
664 .vb
665     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
666 .ve
667 
668   Then, your PetscPartitioner type can be chosen with the procedural interface via
669 .vb
670     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
671     PetscPartitionerSetType(PetscPartitioner, "my_part");
672 .ve
673    or at runtime via the option
674 .vb
675     -petscpartitioner_type my_part
676 .ve
677 
678   Level: advanced
679 
680 .keywords: PetscPartitioner, register
681 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
682 
683 @*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Record the constructor under sname in the global registry consulted by PetscPartitionerSetType() */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
692 
693 /*@C
694   PetscPartitionerSetType - Builds a particular PetscPartitioner
695 
696   Collective on PetscPartitioner
697 
698   Input Parameters:
699 + part - The PetscPartitioner object
700 - name - The kind of partitioner
701 
702   Options Database Key:
703 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
704 
705   Level: intermediate
706 
707 .keywords: PetscPartitioner, set, type
708 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
709 @*/
710 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
711 {
712   PetscErrorCode (*r)(PetscPartitioner);
713   PetscBool      match;
714   PetscErrorCode ierr;
715 
716   PetscFunctionBegin;
717   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
718   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
719   if (match) PetscFunctionReturn(0);
720 
721   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
722   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
723   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
724 
725   if (part->ops->destroy) {
726     ierr = (*part->ops->destroy)(part);CHKERRQ(ierr);
727   }
728   part->noGraph = PETSC_FALSE;
729   ierr = PetscMemzero(part->ops, sizeof(struct _PetscPartitionerOps));CHKERRQ(ierr);
730   ierr = (*r)(part);CHKERRQ(ierr);
731   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
732   PetscFunctionReturn(0);
733 }
734 
735 /*@C
736   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
737 
738   Not Collective
739 
740   Input Parameter:
741 . part - The PetscPartitioner
742 
743   Output Parameter:
744 . name - The PetscPartitioner type name
745 
746   Level: intermediate
747 
748 .keywords: PetscPartitioner, get, type, name
749 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
750 @*/
751 PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
752 {
753   PetscErrorCode ierr;
754 
755   PetscFunctionBegin;
756   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
757   PetscValidPointer(name, 2);
758   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
759   *name = ((PetscObject) part)->type_name;
760   PetscFunctionReturn(0);
761 }
762 
763 /*@C
764   PetscPartitionerView - Views a PetscPartitioner
765 
766   Collective on PetscPartitioner
767 
768   Input Parameter:
769 + part - the PetscPartitioner object to view
770 - v    - the viewer
771 
772   Level: developer
773 
774 .seealso: PetscPartitionerDestroy()
775 @*/
776 PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
777 {
778   PetscMPIInt    size;
779   PetscBool      isascii;
780   PetscErrorCode ierr;
781 
782   PetscFunctionBegin;
783   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
784   if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
785   ierr = PetscObjectTypeCompare((PetscObject) v, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
786   if (isascii) {
787     ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
788     ierr = PetscViewerASCIIPrintf(v, "Graph Partitioner: %d MPI Process%s\n", size, size > 1 ? "es" : "");CHKERRQ(ierr);
789     ierr = PetscViewerASCIIPrintf(v, "  type: %s\n", part->hdr.type_name);CHKERRQ(ierr);
790     ierr = PetscViewerASCIIPushTab(v);CHKERRQ(ierr);
791     ierr = PetscViewerASCIIPrintf(v, "edge cut: %D\n", part->edgeCut);CHKERRQ(ierr);
792     ierr = PetscViewerASCIIPrintf(v, "balance:  %.2g\n", part->balance);CHKERRQ(ierr);
793     ierr = PetscViewerASCIIPopTab(v);CHKERRQ(ierr);
794   }
795   if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
796   PetscFunctionReturn(0);
797 }
798 
799 static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType)
800 {
801   PetscFunctionBegin;
802   if (!currentType) {
803 #if defined(PETSC_HAVE_CHACO)
804     *defaultType = PETSCPARTITIONERCHACO;
805 #elif defined(PETSC_HAVE_PARMETIS)
806     *defaultType = PETSCPARTITIONERPARMETIS;
807 #elif defined(PETSC_HAVE_PTSCOTCH)
808     *defaultType = PETSCPARTITIONERPTSCOTCH;
809 #else
810     *defaultType = PETSCPARTITIONERSIMPLE;
811 #endif
812   } else {
813     *defaultType = currentType;
814   }
815   PetscFunctionReturn(0);
816 }
817 
818 /*@
819   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
820 
821   Collective on PetscPartitioner
822 
823   Input Parameter:
824 . part - the PetscPartitioner object to set options for
825 
826   Level: developer
827 
828 .seealso: PetscPartitionerView()
829 @*/
830 PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
831 {
832   const char    *defaultType;
833   char           name[256];
834   PetscBool      flg;
835   PetscErrorCode ierr;
836 
837   PetscFunctionBegin;
838   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
839   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
840   ierr = PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);CHKERRQ(ierr);
841   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
842   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, sizeof(name), &flg);CHKERRQ(ierr);
843   if (flg) {
844     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
845   } else if (!((PetscObject) part)->type_name) {
846     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
847   }
848   if (part->ops->setfromoptions) {
849     ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);
850   }
851   ierr = PetscViewerDestroy(&part->viewerGraph);CHKERRQ(ierr);
852   ierr = PetscOptionsGetViewer(((PetscObject) part)->comm, ((PetscObject) part)->options, ((PetscObject) part)->prefix, "-petscpartitioner_view_graph", &part->viewerGraph, &part->formatGraph, &part->viewGraph);CHKERRQ(ierr);
853   /* process any options handlers added with PetscObjectAddOptionsHandler() */
854   ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
855   ierr = PetscOptionsEnd();CHKERRQ(ierr);
856   PetscFunctionReturn(0);
857 }
858 
859 /*@C
860   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
861 
862   Collective on PetscPartitioner
863 
864   Input Parameter:
865 . part - the PetscPartitioner object to setup
866 
867   Level: developer
868 
869 .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
870 @*/
871 PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
872 {
873   PetscErrorCode ierr;
874 
875   PetscFunctionBegin;
876   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
877   if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
878   PetscFunctionReturn(0);
879 }
880 
881 /*@
882   PetscPartitionerDestroy - Destroys a PetscPartitioner object
883 
884   Collective on PetscPartitioner
885 
886   Input Parameter:
887 . part - the PetscPartitioner object to destroy
888 
889   Level: developer
890 
891 .seealso: PetscPartitionerView()
892 @*/
893 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
894 {
895   PetscErrorCode ierr;
896 
897   PetscFunctionBegin;
898   if (!*part) PetscFunctionReturn(0);
899   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
900 
901   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
902   ((PetscObject) (*part))->refct = 0;
903 
904   ierr = PetscViewerDestroy(&(*part)->viewerGraph);CHKERRQ(ierr);
905   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
906   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
907   PetscFunctionReturn(0);
908 }
909 
910 /*@
911   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
912 
913   Collective on MPI_Comm
914 
915   Input Parameter:
916 . comm - The communicator for the PetscPartitioner object
917 
918   Output Parameter:
919 . part - The PetscPartitioner object
920 
921   Level: beginner
922 
923 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
924 @*/
925 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
926 {
927   PetscPartitioner p;
928   const char       *partitionerType = NULL;
929   PetscErrorCode   ierr;
930 
931   PetscFunctionBegin;
932   PetscValidPointer(part, 2);
933   *part = NULL;
934   ierr = DMInitializePackage();CHKERRQ(ierr);
935 
936   ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
937   ierr = PetscPartitionerGetDefaultType(NULL,&partitionerType);CHKERRQ(ierr);
938   ierr = PetscPartitionerSetType(p,partitionerType);CHKERRQ(ierr);
939 
940   p->edgeCut = 0;
941   p->balance = 0.0;
942 
943   *part = p;
944   PetscFunctionReturn(0);
945 }
946 
947 /*@
948   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
949 
950   Collective on DM
951 
952   Input Parameters:
953 + part    - The PetscPartitioner
954 - dm      - The mesh DM
955 
956   Output Parameters:
957 + partSection     - The PetscSection giving the division of points by partition
958 - partition       - The list of points by partition
959 
960   Options Database:
961 . -petscpartitioner_view_graph - View the graph we are partitioning
962 
963   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
964 
965   Level: developer
966 
967 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
968 @*/
969 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
970 {
971   PetscMPIInt    size;
972   PetscErrorCode ierr;
973 
974   PetscFunctionBegin;
975   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
976   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
977   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
978   PetscValidPointer(partition, 5);
979   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
980   if (size == 1) {
981     PetscInt *points;
982     PetscInt  cStart, cEnd, c;
983 
984     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
985     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
986     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
987     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
988     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
989     for (c = cStart; c < cEnd; ++c) points[c] = c;
990     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
991     PetscFunctionReturn(0);
992   }
993   if (part->height == 0) {
994     PetscInt numVertices = 0;
995     PetscInt *start     = NULL;
996     PetscInt *adjacency = NULL;
997     IS       globalNumbering;
998 
999     if (!part->noGraph || part->viewGraph) {
1000       ierr = DMPlexCreatePartitionerGraph(dm, part->height, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
1001     } else {
1002       const PetscInt *idxs;
1003       PetscInt       p, pStart, pEnd;
1004 
1005       ierr = DMPlexGetHeightStratum(dm, part->height, &pStart, &pEnd);CHKERRQ(ierr);
1006       ierr = DMPlexCreateNumbering_Internal(dm, pStart, pEnd, 0, NULL, dm->sf, &globalNumbering);CHKERRQ(ierr);
1007       ierr = ISGetIndices(globalNumbering, &idxs);CHKERRQ(ierr);
1008       for (p = 0; p < pEnd - pStart; p++) numVertices += idxs[p] < 0 ? 0 : 1;
1009       ierr = ISRestoreIndices(globalNumbering, &idxs);CHKERRQ(ierr);
1010     }
1011     if (part->viewGraph) {
1012       PetscViewer viewer = part->viewerGraph;
1013       PetscBool   isascii;
1014       PetscInt    v, i;
1015       PetscMPIInt rank;
1016 
1017       ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) viewer), &rank);CHKERRQ(ierr);
1018       ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
1019       if (isascii) {
1020         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
1021         ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]Nv: %D\n", rank, numVertices);CHKERRQ(ierr);
1022         for (v = 0; v < numVertices; ++v) {
1023           const PetscInt s = start[v];
1024           const PetscInt e = start[v+1];
1025 
1026           ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]  ", rank);CHKERRQ(ierr);
1027           for (i = s; i < e; ++i) {ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%D ", adjacency[i]);CHKERRQ(ierr);}
1028           ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%D-%D)\n", s, e);CHKERRQ(ierr);
1029         }
1030         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1031         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
1032       }
1033     }
1034     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no partitioning method");
1035     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
1036     ierr = PetscFree(start);CHKERRQ(ierr);
1037     ierr = PetscFree(adjacency);CHKERRQ(ierr);
1038     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
1039       const PetscInt *globalNum;
1040       const PetscInt *partIdx;
1041       PetscInt       *map, cStart, cEnd;
1042       PetscInt       *adjusted, i, localSize, offset;
1043       IS             newPartition;
1044 
1045       ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
1046       ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
1047       ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
1048       ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
1049       ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
1050       ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
1051       for (i = cStart, offset = 0; i < cEnd; i++) {
1052         if (globalNum[i - cStart] >= 0) map[offset++] = i;
1053       }
1054       for (i = 0; i < localSize; i++) {
1055         adjusted[i] = map[partIdx[i]];
1056       }
1057       ierr = PetscFree(map);CHKERRQ(ierr);
1058       ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
1059       ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
1060       ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
1061       ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
1062       ierr = ISDestroy(partition);CHKERRQ(ierr);
1063       *partition = newPartition;
1064     }
1065   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
1066   ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
1067   PetscFunctionReturn(0);
1068 }
1069 
1070 static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
1071 {
1072   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1073   PetscErrorCode          ierr;
1074 
1075   PetscFunctionBegin;
1076   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
1077   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
1078   ierr = PetscFree(p);CHKERRQ(ierr);
1079   PetscFunctionReturn(0);
1080 }
1081 
1082 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
1083 {
1084   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1085   PetscErrorCode          ierr;
1086 
1087   PetscFunctionBegin;
1088   if (p->random) {
1089     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1090     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
1091     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1092   }
1093   PetscFunctionReturn(0);
1094 }
1095 
1096 static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
1097 {
1098   PetscBool      iascii;
1099   PetscErrorCode ierr;
1100 
1101   PetscFunctionBegin;
1102   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1103   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1104   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1105   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
1106   PetscFunctionReturn(0);
1107 }
1108 
1109 static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1110 {
1111   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1112   PetscErrorCode          ierr;
1113 
1114   PetscFunctionBegin;
1115   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner Shell Options");CHKERRQ(ierr);
1116   ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
1117   ierr = PetscOptionsTail();CHKERRQ(ierr);
1118   PetscFunctionReturn(0);
1119 }
1120 
1121 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1122 {
1123   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1124   PetscInt                np;
1125   PetscErrorCode          ierr;
1126 
1127   PetscFunctionBegin;
1128   if (p->random) {
1129     PetscRandom r;
1130     PetscInt   *sizes, *points, v, p;
1131     PetscMPIInt rank;
1132 
1133     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1134     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
1135     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr);
1136     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
1137     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
1138     for (v = 0; v < numVertices; ++v) {points[v] = v;}
1139     for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);}
1140     for (v = numVertices-1; v > 0; --v) {
1141       PetscReal val;
1142       PetscInt  w, tmp;
1143 
1144       ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));CHKERRQ(ierr);
1145       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
1146       w    = PetscFloorReal(val);
1147       tmp       = points[v];
1148       points[v] = points[w];
1149       points[w] = tmp;
1150     }
1151     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
1152     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
1153     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
1154   }
1155   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
1156   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
1157   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
1158   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
1159   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
1160   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
1161   *partition = p->partition;
1162   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
1163   PetscFunctionReturn(0);
1164 }
1165 
1166 static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
1167 {
1168   PetscFunctionBegin;
1169   part->noGraph             = PETSC_TRUE; /* PetscPartitionerShell cannot overload the partition call, so it is safe for now */
1170   part->ops->view           = PetscPartitionerView_Shell;
1171   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
1172   part->ops->destroy        = PetscPartitionerDestroy_Shell;
1173   part->ops->partition      = PetscPartitionerPartition_Shell;
1174   PetscFunctionReturn(0);
1175 }
1176 
1177 /*MC
1178   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
1179 
1180   Level: intermediate
1181 
1182 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1183 M*/
1184 
1185 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
1186 {
1187   PetscPartitioner_Shell *p;
1188   PetscErrorCode          ierr;
1189 
1190   PetscFunctionBegin;
1191   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1192   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1193   part->data = p;
1194 
1195   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
1196   p->random = PETSC_FALSE;
1197   PetscFunctionReturn(0);
1198 }
1199 
1200 /*@C
1201   PetscPartitionerShellSetPartition - Set an artifical partition for a mesh
1202 
1203   Collective on Part
1204 
1205   Input Parameters:
1206 + part   - The PetscPartitioner
1207 . size   - The number of partitions
1208 . sizes  - array of size size (or NULL) providing the number of points in each partition
1209 - points - array of size sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)
1210 
1211   Level: developer
1212 
1213   Notes:
1214 
1215     It is safe to free the sizes and points arrays after use in this routine.
1216 
1217 .seealso DMPlexDistribute(), PetscPartitionerCreate()
1218 @*/
1219 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
1220 {
1221   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1222   PetscInt                proc, numPoints;
1223   PetscErrorCode          ierr;
1224 
1225   PetscFunctionBegin;
1226   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1227   if (sizes)  {PetscValidPointer(sizes, 3);}
1228   if (points) {PetscValidPointer(points, 4);}
1229   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
1230   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
1231   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
1232   ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
1233   if (sizes) {
1234     for (proc = 0; proc < size; ++proc) {
1235       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
1236     }
1237   }
1238   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
1239   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
1240   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
1241   PetscFunctionReturn(0);
1242 }
1243 
1244 /*@
1245   PetscPartitionerShellSetRandom - Set the flag to use a random partition
1246 
1247   Collective on Part
1248 
1249   Input Parameters:
1250 + part   - The PetscPartitioner
1251 - random - The flag to use a random partition
1252 
1253   Level: intermediate
1254 
1255 .seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
1256 @*/
1257 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
1258 {
1259   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1260 
1261   PetscFunctionBegin;
1262   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1263   p->random = random;
1264   PetscFunctionReturn(0);
1265 }
1266 
1267 /*@
1268   PetscPartitionerShellGetRandom - get the flag to use a random partition
1269 
1270   Collective on Part
1271 
1272   Input Parameter:
1273 . part   - The PetscPartitioner
1274 
1275   Output Parameter
1276 . random - The flag to use a random partition
1277 
1278   Level: intermediate
1279 
1280 .seealso PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
1281 @*/
1282 PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
1283 {
1284   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1285 
1286   PetscFunctionBegin;
1287   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1288   PetscValidPointer(random, 2);
1289   *random = p->random;
1290   PetscFunctionReturn(0);
1291 }
1292 
1293 static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
1294 {
1295   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
1296   PetscErrorCode          ierr;
1297 
1298   PetscFunctionBegin;
1299   ierr = PetscFree(p);CHKERRQ(ierr);
1300   PetscFunctionReturn(0);
1301 }
1302 
/* The simple partitioner has no type-specific state to report */
static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1308 
1309 static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
1310 {
1311   PetscBool      iascii;
1312   PetscErrorCode ierr;
1313 
1314   PetscFunctionBegin;
1315   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1316   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1317   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1318   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
1319   PetscFunctionReturn(0);
1320 }
1321 
/* Divide the global vertices into nparts contiguous, nearly equal chunks,
   ignoring the adjacency graph. Each rank records the dofs for the parts that
   overlap its contiguous slice [myFirst, myLast] of the global numbering. */
static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)part);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  /* The partition lists local vertices in their natural order */
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial: part np gets numVertices/nparts vertices, the first (numVertices % nparts) parts get one extra */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  } else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Prefix-sum the per-rank vertex counts to get each rank's global range */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      /* The first 'rem' parts have pBig vertices, the remaining parts have pSmall */
      PetscInt rem = nvGlobal % nparts;
      PetscInt pSmall = nvGlobal/nparts;
      PetscInt pBig = nvGlobal/nparts + 1;


      if (rem) {
        /* Locate the parts containing myFirst and myLast, accounting for the
           boundary between the large (pBig) and small (pSmall) parts */
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        } else {
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        } else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      } else {
        /* All parts have the same size */
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      /* Record, for every part overlapping this rank's range, how many of its
         vertices live on this rank */
      for (np = firstPart; np <= lastPart; np++) {
        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1391 
1392 static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1393 {
1394   PetscFunctionBegin;
1395   part->noGraph        = PETSC_TRUE;
1396   part->ops->view      = PetscPartitionerView_Simple;
1397   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1398   part->ops->partition = PetscPartitionerPartition_Simple;
1399   PetscFunctionReturn(0);
1400 }
1401 
1402 /*MC
1403   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1404 
1405   Level: intermediate
1406 
1407 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1408 M*/
1409 
1410 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1411 {
1412   PetscPartitioner_Simple *p;
1413   PetscErrorCode           ierr;
1414 
1415   PetscFunctionBegin;
1416   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1417   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1418   part->data = p;
1419 
1420   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1421   PetscFunctionReturn(0);
1422 }
1423 
1424 static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1425 {
1426   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1427   PetscErrorCode          ierr;
1428 
1429   PetscFunctionBegin;
1430   ierr = PetscFree(p);CHKERRQ(ierr);
1431   PetscFunctionReturn(0);
1432 }
1433 
/* The gather partitioner has no type-specific state to report */
static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1439 
1440 static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1441 {
1442   PetscBool      iascii;
1443   PetscErrorCode ierr;
1444 
1445   PetscFunctionBegin;
1446   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1447   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1448   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1449   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1450   PetscFunctionReturn(0);
1451 }
1452 
1453 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1454 {
1455   PetscInt       np;
1456   PetscErrorCode ierr;
1457 
1458   PetscFunctionBegin;
1459   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1460   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1461   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1462   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1463   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1464   PetscFunctionReturn(0);
1465 }
1466 
1467 static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1468 {
1469   PetscFunctionBegin;
1470   part->noGraph        = PETSC_TRUE;
1471   part->ops->view      = PetscPartitionerView_Gather;
1472   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1473   part->ops->partition = PetscPartitionerPartition_Gather;
1474   PetscFunctionReturn(0);
1475 }
1476 
1477 /*MC
  PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object that assigns all points to the first partition
1479 
1480   Level: intermediate
1481 
1482 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1483 M*/
1484 
1485 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1486 {
1487   PetscPartitioner_Gather *p;
1488   PetscErrorCode           ierr;
1489 
1490   PetscFunctionBegin;
1491   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1492   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1493   part->data = p;
1494 
1495   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1496   PetscFunctionReturn(0);
1497 }
1498 
1499 
1500 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1501 {
1502   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1503   PetscErrorCode          ierr;
1504 
1505   PetscFunctionBegin;
1506   ierr = PetscFree(p);CHKERRQ(ierr);
1507   PetscFunctionReturn(0);
1508 }
1509 
/* ASCII viewer for the Chaco partitioner: there is no Chaco-specific state to report, so this is a no-op. */
static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1515 
1516 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1517 {
1518   PetscBool      iascii;
1519   PetscErrorCode ierr;
1520 
1521   PetscFunctionBegin;
1522   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1523   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1524   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1525   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1526   PetscFunctionReturn(0);
1527 }
1528 
#if defined(PETSC_HAVE_CHACO)
#if defined(PETSC_HAVE_UNISTD_H)
#include <unistd.h>
#endif
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
#include <chaco.h>
#else
/* Older versions of Chaco do not have an include file */
PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
                       float *ewgts, float *x, float *y, float *z, char *outassignname,
                       char *outfilename, short *assignment, int architecture, int ndims_tot,
                       int mesh_dims[3], double *goal, int global_method, int local_method,
                       int rqi_flag, int vmax, int ndims, double eigtol, long seed);
#endif
/* Global owned by the Chaco library: when nonzero, Chaco frees the caller's graph arrays
   after partitioning (we set it to 0 before calling interface() below) */
extern int FREE_GRAPH;
#endif
1545 
/* Partition a serial graph in CSR form (start[], adjacency[]) into nparts pieces with Chaco.
   The whole graph must reside on a single process. On output, partSection holds the number of
   points in each partition and *partition lists the vertex numbers grouped by partition. */
static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int           *assignment;              /* Output partition */
#else
  short int     *assignment;              /* Output partition */
#endif
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
#if defined (PETSC_USE_DEBUG)
  /* Chaco is a serial partitioner: error out if more than one process contributes vertices */
  {
    int ival,isum;
    PetscBool distributed;

    ival = (numVertices > 0);
    ierr = MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);CHKERRQ(ierr);
    distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
    if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
  }
#endif
  /* Processes without vertices return an empty partition */
  if (!numVertices) {
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Shift the adjacency to 1-based numbering in place; undone before returning */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  /* Recover Chaco's captured output from the pipe and restore the original stdout */
  {
    char msgLog[10000];
    int  count;

    fflush(stdout);
    /* NOTE(review): a single read() captures at most 10000-1 bytes; output longer than the
       pipe buffer could in principle block interface() -- confirm the buffer is adequate */
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#else
  if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  /* Group the vertex numbers by their assigned partition */
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift applied above */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1666 
1667 static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1668 {
1669   PetscFunctionBegin;
1670   part->noGraph        = PETSC_FALSE;
1671   part->ops->view      = PetscPartitionerView_Chaco;
1672   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1673   part->ops->partition = PetscPartitionerPartition_Chaco;
1674   PetscFunctionReturn(0);
1675 }
1676 
1677 /*MC
1678   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1679 
1680   Level: intermediate
1681 
1682 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1683 M*/
1684 
1685 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1686 {
1687   PetscPartitioner_Chaco *p;
1688   PetscErrorCode          ierr;
1689 
1690   PetscFunctionBegin;
1691   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1692   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1693   part->data = p;
1694 
1695   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1696   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1697   PetscFunctionReturn(0);
1698 }
1699 
/* Names of the ParMetis partitioning methods; the index matches PetscPartitioner_ParMetis.ptype */
static const char *ptypes[] = {"kway", "rb"};
1701 
1702 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1703 {
1704   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1705   PetscErrorCode             ierr;
1706 
1707   PetscFunctionBegin;
1708   ierr = PetscFree(p);CHKERRQ(ierr);
1709   PetscFunctionReturn(0);
1710 }
1711 
/* Print the ParMetis partitioner parameters (method name, imbalance ratio, debug flag,
   random seed) to an ASCII viewer. */
static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
  PetscErrorCode             ierr;

  PetscFunctionBegin;
  ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "ParMetis type: %s\n", ptypes[p->ptype]);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "load imbalance ratio %g\n", (double) p->imbalanceRatio);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "debug flag %D\n", p->debugFlag);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "random seed %D\n", p->randomSeed);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1726 
1727 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1728 {
1729   PetscBool      iascii;
1730   PetscErrorCode ierr;
1731 
1732   PetscFunctionBegin;
1733   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1734   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1735   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1736   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1737   PetscFunctionReturn(0);
1738 }
1739 
/* Process command line options for the ParMetis partitioner: partitioning method,
   load imbalance ratio, debug flag, and random seed. */
static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
  PetscErrorCode            ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");CHKERRQ(ierr);
  ierr = PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-petscpartitioner_parmetis_imbalance_ratio", "Load imbalance ratio limit", "", p->imbalanceRatio, &p->imbalanceRatio, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-petscpartitioner_parmetis_debug", "Debugging flag", "", p->debugFlag, &p->debugFlag, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-petscpartitioner_parmetis_seed", "Random seed", "", p->randomSeed, &p->randomSeed, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1754 
1755 #if defined(PETSC_HAVE_PARMETIS)
1756 #include <parmetis.h>
1757 #endif
1758 
1759 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1760 {
1761 #if defined(PETSC_HAVE_PARMETIS)
1762   PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *) part->data;
1763   MPI_Comm       comm;
1764   PetscSection   section;
1765   PetscInt       nvtxs       = numVertices; /* The number of vertices in full graph */
1766   PetscInt      *vtxdist;                   /* Distribution of vertices across processes */
1767   PetscInt      *xadj        = start;       /* Start of edge list for each vertex */
1768   PetscInt      *adjncy      = adjacency;   /* Edge lists for all vertices */
1769   PetscInt      *vwgt        = NULL;        /* Vertex weights */
1770   PetscInt      *adjwgt      = NULL;        /* Edge weights */
1771   PetscInt       wgtflag     = 0;           /* Indicates which weights are present */
1772   PetscInt       numflag     = 0;           /* Indicates initial offset (0 or 1) */
1773   PetscInt       ncon        = 1;           /* The number of weights per vertex */
1774   PetscInt       metis_ptype = pm->ptype;   /* kway or recursive bisection */
1775   real_t        *tpwgts;                    /* The fraction of vertex weights assigned to each partition */
1776   real_t        *ubvec;                     /* The balance intolerance for vertex weights */
1777   PetscInt       options[64];               /* Options */
1778   /* Outputs */
1779   PetscInt       v, i, *assignment, *points;
1780   PetscMPIInt    size, rank, p;
1781   PetscErrorCode ierr;
1782 
1783   PetscFunctionBegin;
1784   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1785   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1786   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1787   /* Calculate vertex distribution */
1788   ierr = PetscMalloc5(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
1789   vtxdist[0] = 0;
1790   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1791   for (p = 2; p <= size; ++p) {
1792     vtxdist[p] += vtxdist[p-1];
1793   }
1794   /* Calculate partition weights */
1795   for (p = 0; p < nparts; ++p) {
1796     tpwgts[p] = 1.0/nparts;
1797   }
1798   ubvec[0] = pm->imbalanceRatio;
1799   /* Weight cells by dofs on cell by default */
1800   ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
1801   for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1802   if (section) {
1803     PetscInt cStart, cEnd, dof;
1804 
1805     /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1806     /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1807     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
1808     for (v = cStart; v < cStart + numVertices; ++v) {
1809       ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1810       vwgt[v-cStart] = PetscMax(dof, 1);
1811     }
1812   }
1813   wgtflag |= 2; /* have weights on graph vertices */
1814 
1815   if (nparts == 1) {
1816     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1817   } else {
1818     for (p = 0; !vtxdist[p+1] && p < size; ++p);
1819     if (vtxdist[p+1] == vtxdist[size]) {
1820       if (rank == p) {
1821         ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
1822         options[METIS_OPTION_DBGLVL] = pm->debugFlag;
1823         options[METIS_OPTION_SEED]   = pm->randomSeed;
1824         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
1825         if (metis_ptype == 1) {
1826           PetscStackPush("METIS_PartGraphRecursive");
1827           ierr = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1828           PetscStackPop;
1829           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
1830         } else {
1831           /*
1832            It would be nice to activate the two options below, but they would need some actual testing.
1833            - Turning on these options may exercise path of the METIS code that have bugs and may break production runs.
1834            - If CONTIG is set to 1, METIS will exit with error if the graph is disconnected, despite the manual saying the option is ignored in such case.
1835           */
1836           /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
1837           /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
1838           PetscStackPush("METIS_PartGraphKway");
1839           ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1840           PetscStackPop;
1841           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1842         }
1843       }
1844     } else {
1845       options[0] = 1; /*use options */
1846       options[1] = pm->debugFlag;
1847       options[2] = (pm->randomSeed == -1) ? 15 : pm->randomSeed; /* default is GLOBAL_SEED=15 from `libparmetis/defs.h` */
1848       PetscStackPush("ParMETIS_V3_PartKway");
1849       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &comm);
1850       PetscStackPop;
1851       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
1852     }
1853   }
1854   /* Convert to PetscSection+IS */
1855   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1856   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1857   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1858   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1859   for (p = 0, i = 0; p < nparts; ++p) {
1860     for (v = 0; v < nvtxs; ++v) {
1861       if (assignment[v] == p) points[i++] = v;
1862     }
1863   }
1864   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1865   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1866   ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
1867   PetscFunctionReturn(0);
1868 #else
1869   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1870 #endif
1871 }
1872 
1873 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1874 {
1875   PetscFunctionBegin;
1876   part->noGraph             = PETSC_FALSE;
1877   part->ops->view           = PetscPartitionerView_ParMetis;
1878   part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
1879   part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
1880   part->ops->partition      = PetscPartitionerPartition_ParMetis;
1881   PetscFunctionReturn(0);
1882 }
1883 
1884 /*MC
1885   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1886 
1887   Level: intermediate
1888 
1889 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1890 M*/
1891 
1892 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1893 {
1894   PetscPartitioner_ParMetis *p;
1895   PetscErrorCode          ierr;
1896 
1897   PetscFunctionBegin;
1898   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1899   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1900   part->data = p;
1901 
1902   p->ptype          = 0;
1903   p->imbalanceRatio = 1.05;
1904   p->debugFlag      = 0;
1905   p->randomSeed     = -1; /* defaults to GLOBAL_SEED=15 from `libparmetis/defs.h` */
1906 
1907   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1908   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1909   PetscFunctionReturn(0);
1910 }
1911 
PetscBool PTScotchPartitionercite = PETSC_FALSE;
/* BibTeX entry passed to PetscCitationsRegister() when a PT-Scotch partitioner is created */
const char PTScotchPartitionerCitation[] =
  "@article{PTSCOTCH,\n"
  "  author  = {C. Chevalier and F. Pellegrini},\n"
  "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
  "  journal = {Parallel Computing},\n"
  "  volume  = {34},\n"
  "  number  = {6},\n"
  "  pages   = {318--331},\n"
  "  year    = {2008},\n"
  "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
  "}\n";
1924 
#if defined(PETSC_HAVE_PTSCOTCH)

EXTERN_C_BEGIN
#include <ptscotch.h>
EXTERN_C_END

/* PT-Scotch routines return a nonzero int on failure; turn that into a PETSc library error */
#define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0)
1932 
1933 static int PTScotch_Strategy(PetscInt strategy)
1934 {
1935   switch (strategy) {
1936   case  0: return SCOTCH_STRATDEFAULT;
1937   case  1: return SCOTCH_STRATQUALITY;
1938   case  2: return SCOTCH_STRATSPEED;
1939   case  3: return SCOTCH_STRATBALANCE;
1940   case  4: return SCOTCH_STRATSAFETY;
1941   case  5: return SCOTCH_STRATSCALABILITY;
1942   case  6: return SCOTCH_STRATRECURSIVE;
1943   case  7: return SCOTCH_STRATREMAP;
1944   default: return SCOTCH_STRATDEFAULT;
1945   }
1946 }
1947 
1948 
/* Partition a serial graph in CSR form (xadj[], adjncy[]) into nparts pieces with the
   sequential Scotch library; the assignment is written into part[]. vtxwgt/adjwgt may
   be NULL for an unweighted graph. */
static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
                                             SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[])
{
  SCOTCH_Graph   grafdat;
  SCOTCH_Strat   stradat;
  SCOTCH_Num     vertnbr = n;
  SCOTCH_Num     edgenbr = xadj[n];
  SCOTCH_Num*    velotab = vtxwgt;
  SCOTCH_Num*    edlotab = adjwgt;
  SCOTCH_Num     flagval = strategy;
  double         kbalval = imbalance;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Vertex weights can be disabled from the command line.
     NOTE(review): the option name is misspelled ("petscpartititoner"); renaming it would
     silently break existing command lines -- confirm before fixing */
  {
    PetscBool flg = PETSC_TRUE;
    ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
    if (!flg) velotab = NULL;
  }
  ierr = SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
#if defined(PETSC_USE_DEBUG)
  ierr = SCOTCH_graphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
#endif
  ierr = SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
  SCOTCH_stratExit(&stradat);
  SCOTCH_graphExit(&grafdat);
  PetscFunctionReturn(0);
}
1980 
1981 static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1982                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[], MPI_Comm comm)
1983 {
1984   PetscMPIInt     procglbnbr;
1985   PetscMPIInt     proclocnum;
1986   SCOTCH_Arch     archdat;
1987   SCOTCH_Dgraph   grafdat;
1988   SCOTCH_Dmapping mappdat;
1989   SCOTCH_Strat    stradat;
1990   SCOTCH_Num      vertlocnbr;
1991   SCOTCH_Num      edgelocnbr;
1992   SCOTCH_Num*     veloloctab = vtxwgt;
1993   SCOTCH_Num*     edloloctab = adjwgt;
1994   SCOTCH_Num      flagval = strategy;
1995   double          kbalval = imbalance;
1996   PetscErrorCode  ierr;
1997 
1998   PetscFunctionBegin;
1999   {
2000     PetscBool flg = PETSC_TRUE;
2001     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
2002     if (!flg) veloloctab = NULL;
2003   }
2004   ierr = MPI_Comm_size(comm, &procglbnbr);CHKERRQ(ierr);
2005   ierr = MPI_Comm_rank(comm, &proclocnum);CHKERRQ(ierr);
2006   vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
2007   edgelocnbr = xadj[vertlocnbr];
2008 
2009   ierr = SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr);
2010   ierr = SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr);
2011 #if defined(PETSC_USE_DEBUG)
2012   ierr = SCOTCH_dgraphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
2013 #endif
2014   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
2015   ierr = SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRQ(ierr);
2016   ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
2017   ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
2018   ierr = SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr);
2019 
2020   ierr = SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr);
2021   SCOTCH_dgraphMapExit(&grafdat, &mappdat);
2022   SCOTCH_archExit(&archdat);
2023   SCOTCH_stratExit(&stradat);
2024   SCOTCH_dgraphExit(&grafdat);
2025   PetscFunctionReturn(0);
2026 }
2027 
2028 #endif /* PETSC_HAVE_PTSCOTCH */
2029 
2030 static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
2031 {
2032   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
2033   PetscErrorCode             ierr;
2034 
2035   PetscFunctionBegin;
2036   ierr = PetscFree(p);CHKERRQ(ierr);
2037   PetscFunctionReturn(0);
2038 }
2039 
/* Print the PT-Scotch partitioner parameters (strategy name, load imbalance ratio)
   to an ASCII viewer. */
static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
  PetscErrorCode            ierr;

  PetscFunctionBegin;
  ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n",(double)p->imbalance);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2052 
2053 static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
2054 {
2055   PetscBool      iascii;
2056   PetscErrorCode ierr;
2057 
2058   PetscFunctionBegin;
2059   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
2060   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
2061   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
2062   if (iascii) {ierr = PetscPartitionerView_PTScotch_Ascii(part, viewer);CHKERRQ(ierr);}
2063   PetscFunctionReturn(0);
2064 }
2065 
/* Process command line options for the PT-Scotch partitioner: partitioning strategy
   and load imbalance ratio. */
static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
  const char *const         *slist = PTScotchStrategyList;
  PetscInt                  nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0]));
  PetscBool                 flag;
  PetscErrorCode            ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");CHKERRQ(ierr);
  ierr = PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2081 
2082 static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
2083 {
2084 #if defined(PETSC_HAVE_PTSCOTCH)
2085   MPI_Comm       comm       = PetscObjectComm((PetscObject)part);
2086   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
2087   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
2088   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
2089   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
2090   PetscInt      *vwgt       = NULL;        /* Vertex weights */
2091   PetscInt      *adjwgt     = NULL;        /* Edge weights */
2092   PetscInt       v, i, *assignment, *points;
2093   PetscMPIInt    size, rank, p;
2094   PetscErrorCode ierr;
2095 
2096   PetscFunctionBegin;
2097   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2098   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2099   ierr = PetscMalloc2(size+1,&vtxdist,PetscMax(nvtxs,1),&assignment);CHKERRQ(ierr);
2100 
2101   /* Calculate vertex distribution */
2102   vtxdist[0] = 0;
2103   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
2104   for (p = 2; p <= size; ++p) {
2105     vtxdist[p] += vtxdist[p-1];
2106   }
2107 
2108   if (nparts == 1) {
2109     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
2110   } else { /* Weight cells by dofs on cell by default */
2111     PetscSection section;
2112 
2113     /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
2114     /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
2115     ierr = PetscMalloc1(PetscMax(nvtxs,1),&vwgt);CHKERRQ(ierr);
2116     for (v = 0; v < PetscMax(nvtxs,1); ++v) vwgt[v] = 1;
2117     ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
2118     if (section) {
2119       PetscInt vStart, vEnd, dof;
2120       ierr = DMPlexGetHeightStratum(dm, part->height, &vStart, &vEnd);CHKERRQ(ierr);
2121       for (v = vStart; v < vStart + numVertices; ++v) {
2122         ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
2123         vwgt[v-vStart] = PetscMax(dof, 1);
2124       }
2125     }
2126     {
2127       PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
2128       int                       strat = PTScotch_Strategy(pts->strategy);
2129       double                    imbal = (double)pts->imbalance;
2130 
2131       for (p = 0; !vtxdist[p+1] && p < size; ++p);
2132       if (vtxdist[p+1] == vtxdist[size]) {
2133         if (rank == p) {
2134           ierr = PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, assignment);CHKERRQ(ierr);
2135         }
2136       } else {
2137         ierr = PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, assignment, comm);CHKERRQ(ierr);
2138       }
2139     }
2140     ierr = PetscFree(vwgt);CHKERRQ(ierr);
2141   }
2142 
2143   /* Convert to PetscSection+IS */
2144   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
2145   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
2146   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
2147   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
2148   for (p = 0, i = 0; p < nparts; ++p) {
2149     for (v = 0; v < nvtxs; ++v) {
2150       if (assignment[v] == p) points[i++] = v;
2151     }
2152   }
2153   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
2154   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
2155 
2156   ierr = PetscFree2(vtxdist,assignment);CHKERRQ(ierr);
2157   PetscFunctionReturn(0);
2158 #else
2159   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
2160 #endif
2161 }
2162 
2163 static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
2164 {
2165   PetscFunctionBegin;
2166   part->noGraph             = PETSC_FALSE;
2167   part->ops->view           = PetscPartitionerView_PTScotch;
2168   part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
2169   part->ops->partition      = PetscPartitionerPartition_PTScotch;
2170   part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
2171   PetscFunctionReturn(0);
2172 }
2173 
2174 /*MC
2175   PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library
2176 
2177   Level: intermediate
2178 
2179 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
2180 M*/
2181 
2182 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
2183 {
2184   PetscPartitioner_PTScotch *p;
2185   PetscErrorCode          ierr;
2186 
2187   PetscFunctionBegin;
2188   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
2189   ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
2190   part->data = p;
2191 
2192   p->strategy  = 0;
2193   p->imbalance = 0.01;
2194 
2195   ierr = PetscPartitionerInitialize_PTScotch(part);CHKERRQ(ierr);
2196   ierr = PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);CHKERRQ(ierr);
2197   PetscFunctionReturn(0);
2198 }
2199 
2200 
2201 /*@
2202   DMPlexGetPartitioner - Get the mesh partitioner
2203 
2204   Not collective
2205 
2206   Input Parameter:
2207 . dm - The DM
2208 
2209   Output Parameter:
2210 . part - The PetscPartitioner
2211 
2212   Level: developer
2213 
2214   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
2215 
2216 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
2217 @*/
2218 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
2219 {
2220   DM_Plex       *mesh = (DM_Plex *) dm->data;
2221 
2222   PetscFunctionBegin;
2223   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2224   PetscValidPointer(part, 2);
2225   *part = mesh->partitioner;
2226   PetscFunctionReturn(0);
2227 }
2228 
2229 /*@
2230   DMPlexSetPartitioner - Set the mesh partitioner
2231 
  Logically collective on dm and part
2233 
2234   Input Parameters:
2235 + dm - The DM
2236 - part - The partitioner
2237 
2238   Level: developer
2239 
2240   Note: Any existing PetscPartitioner will be destroyed.
2241 
2242 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
2243 @*/
2244 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
2245 {
2246   DM_Plex       *mesh = (DM_Plex *) dm->data;
2247   PetscErrorCode ierr;
2248 
2249   PetscFunctionBegin;
2250   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2251   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
2252   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
2253   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
2254   mesh->partitioner = part;
2255   PetscFunctionReturn(0);
2256 }
2257 
2258 static PetscErrorCode DMPlexAddClosure_Tree(DM dm, PetscHSetI ht, PetscInt point, PetscBool up, PetscBool down)
2259 {
2260   PetscErrorCode ierr;
2261 
2262   PetscFunctionBegin;
2263   if (up) {
2264     PetscInt parent;
2265 
2266     ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
2267     if (parent != point) {
2268       PetscInt closureSize, *closure = NULL, i;
2269 
2270       ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
2271       for (i = 0; i < closureSize; i++) {
2272         PetscInt cpoint = closure[2*i];
2273 
2274         ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
2275         ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
2276       }
2277       ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
2278     }
2279   }
2280   if (down) {
2281     PetscInt numChildren;
2282     const PetscInt *children;
2283 
2284     ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
2285     if (numChildren) {
2286       PetscInt i;
2287 
2288       for (i = 0; i < numChildren; i++) {
2289         PetscInt cpoint = children[i];
2290 
2291         ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
2292         ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
2293       }
2294     }
2295   }
2296   PetscFunctionReturn(0);
2297 }
2298 
2299 PETSC_INTERN PetscErrorCode DMPlexPartitionLabelClosure_Private(DM,DMLabel,PetscInt,PetscInt,const PetscInt[],IS*);
2300 
2301 PetscErrorCode DMPlexPartitionLabelClosure_Private(DM dm, DMLabel label, PetscInt rank, PetscInt numPoints, const PetscInt points[], IS *closureIS)
2302 {
2303   DM_Plex        *mesh = (DM_Plex *)dm->data;
2304   PetscBool      hasTree = (mesh->parentSection || mesh->childSection) ? PETSC_TRUE : PETSC_FALSE;
2305   PetscInt       *closure = NULL, closureSize, nelems, *elems, off = 0, p, c;
2306   PetscHSetI     ht;
2307   PetscErrorCode ierr;
2308 
2309   PetscFunctionBegin;
2310   ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
2311   ierr = PetscHSetIResize(ht, numPoints*16);CHKERRQ(ierr);
2312   for (p = 0; p < numPoints; ++p) {
2313     ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
2314     for (c = 0; c < closureSize*2; c += 2) {
2315       ierr = PetscHSetIAdd(ht, closure[c]);CHKERRQ(ierr);
2316       if (hasTree) {ierr = DMPlexAddClosure_Tree(dm, ht, closure[c], PETSC_TRUE, PETSC_TRUE);CHKERRQ(ierr);}
2317     }
2318   }
2319   if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure);CHKERRQ(ierr);}
2320   ierr = PetscHSetIGetSize(ht, &nelems);CHKERRQ(ierr);
2321   ierr = PetscMalloc1(nelems, &elems);CHKERRQ(ierr);
2322   ierr = PetscHSetIGetElems(ht, &off, elems);CHKERRQ(ierr);
2323   ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
2324   ierr = PetscSortInt(nelems, elems);CHKERRQ(ierr);
2325   ierr = ISCreateGeneral(PETSC_COMM_SELF, nelems, elems, PETSC_OWN_POINTER, closureIS);CHKERRQ(ierr);
2326   PetscFunctionReturn(0);
2327 }
2328 
2329 /*@
2330   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
2331 
2332   Input Parameters:
2333 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2335 
2336   Level: developer
2337 
2338 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2339 @*/
2340 PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
2341 {
2342   IS              rankIS,   pointIS, closureIS;
2343   const PetscInt *ranks,   *points;
2344   PetscInt        numRanks, numPoints, r;
2345   PetscErrorCode  ierr;
2346 
2347   PetscFunctionBegin;
2348   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2349   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2350   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2351   for (r = 0; r < numRanks; ++r) {
2352     const PetscInt rank = ranks[r];
2353     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2354     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2355     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2356     ierr = DMPlexPartitionLabelClosure_Private(dm, label, rank, numPoints, points, &closureIS);CHKERRQ(ierr);
2357     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2358     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2359     ierr = DMLabelSetStratumIS(label, rank, closureIS);CHKERRQ(ierr);
2360     ierr = ISDestroy(&closureIS);CHKERRQ(ierr);
2361   }
2362   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2363   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2364   PetscFunctionReturn(0);
2365 }
2366 
2367 /*@
2368   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
2369 
2370   Input Parameters:
2371 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2373 
2374   Level: developer
2375 
2376 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2377 @*/
2378 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
2379 {
2380   IS              rankIS,   pointIS;
2381   const PetscInt *ranks,   *points;
2382   PetscInt        numRanks, numPoints, r, p, a, adjSize;
2383   PetscInt       *adj = NULL;
2384   PetscErrorCode  ierr;
2385 
2386   PetscFunctionBegin;
2387   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2388   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2389   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2390   for (r = 0; r < numRanks; ++r) {
2391     const PetscInt rank = ranks[r];
2392 
2393     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2394     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2395     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2396     for (p = 0; p < numPoints; ++p) {
2397       adjSize = PETSC_DETERMINE;
2398       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
2399       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
2400     }
2401     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2402     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2403   }
2404   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2405   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2406   ierr = PetscFree(adj);CHKERRQ(ierr);
2407   PetscFunctionReturn(0);
2408 }
2409 
2410 /*@
2411   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
2412 
2413   Input Parameters:
2414 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2416 
2417   Level: developer
2418 
2419   Note: This is required when generating multi-level overlaps to capture
2420   overlap points from non-neighbouring partitions.
2421 
2422 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2423 @*/
2424 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
2425 {
2426   MPI_Comm        comm;
2427   PetscMPIInt     rank;
2428   PetscSF         sfPoint;
2429   DMLabel         lblRoots, lblLeaves;
2430   IS              rankIS, pointIS;
2431   const PetscInt *ranks;
2432   PetscInt        numRanks, r;
2433   PetscErrorCode  ierr;
2434 
2435   PetscFunctionBegin;
2436   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2437   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2438   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
2439   /* Pull point contributions from remote leaves into local roots */
2440   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
2441   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
2442   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2443   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2444   for (r = 0; r < numRanks; ++r) {
2445     const PetscInt remoteRank = ranks[r];
2446     if (remoteRank == rank) continue;
2447     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
2448     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2449     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2450   }
2451   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2452   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2453   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
2454   /* Push point contributions from roots into remote leaves */
2455   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
2456   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
2457   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2458   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2459   for (r = 0; r < numRanks; ++r) {
2460     const PetscInt remoteRank = ranks[r];
2461     if (remoteRank == rank) continue;
2462     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
2463     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2464     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2465   }
2466   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2467   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2468   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
2469   PetscFunctionReturn(0);
2470 }
2471 
2472 /*@
2473   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
2474 
2475   Input Parameters:
+ dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots
2482 
2483   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
2484   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
2485 
2486   Level: developer
2487 
2488 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2489 @*/
2490 PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
2491 {
2492   MPI_Comm           comm;
2493   PetscMPIInt        rank, size, r;
2494   PetscInt           p, n, numNeighbors, numPoints, dof, off, rootSize, l, nleaves, leafSize;
2495   PetscSF            sfPoint;
2496   PetscSection       rootSection;
2497   PetscSFNode       *rootPoints, *leafPoints;
2498   const PetscSFNode *remote;
2499   const PetscInt    *local, *neighbors;
2500   IS                 valueIS;
2501   PetscBool          mpiOverflow = PETSC_FALSE;
2502   PetscErrorCode     ierr;
2503 
2504   PetscFunctionBegin;
2505   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2506   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2507   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2508   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
2509 
2510   /* Convert to (point, rank) and use actual owners */
2511   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
2512   ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
2513   ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
2514   ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
2515   ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
2516   for (n = 0; n < numNeighbors; ++n) {
2517     ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
2518     ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
2519   }
2520   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
2521   ierr = PetscSectionGetStorageSize(rootSection, &rootSize);CHKERRQ(ierr);
2522   ierr = PetscMalloc1(rootSize, &rootPoints);CHKERRQ(ierr);
2523   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
2524   for (n = 0; n < numNeighbors; ++n) {
2525     IS              pointIS;
2526     const PetscInt *points;
2527 
2528     ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
2529     ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
2530     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2531     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2532     for (p = 0; p < numPoints; ++p) {
2533       if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
2534       else       {l = -1;}
2535       if (l >= 0) {rootPoints[off+p] = remote[l];}
2536       else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
2537     }
2538     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2539     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2540   }
2541 
2542   /* Try to communicate overlap using All-to-All */
2543   if (!processSF) {
2544     PetscInt64  counter = 0;
2545     PetscBool   locOverflow = PETSC_FALSE;
2546     PetscMPIInt *scounts, *sdispls, *rcounts, *rdispls;
2547 
2548     ierr = PetscCalloc4(size, &scounts, size, &sdispls, size, &rcounts, size, &rdispls);CHKERRQ(ierr);
2549     for (n = 0; n < numNeighbors; ++n) {
2550       ierr = PetscSectionGetDof(rootSection, neighbors[n], &dof);CHKERRQ(ierr);
2551       ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
2552 #if defined(PETSC_USE_64BIT_INDICES)
2553       if (dof > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
2554       if (off > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
2555 #endif
2556       scounts[neighbors[n]] = (PetscMPIInt) dof;
2557       sdispls[neighbors[n]] = (PetscMPIInt) off;
2558     }
2559     ierr = MPI_Alltoall(scounts, 1, MPI_INT, rcounts, 1, MPI_INT, comm);CHKERRQ(ierr);
2560     for (r = 0; r < size; ++r) { rdispls[r] = (int)counter; counter += rcounts[r]; }
2561     if (counter > PETSC_MPI_INT_MAX) locOverflow = PETSC_TRUE;
2562     ierr = MPI_Allreduce(&locOverflow, &mpiOverflow, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
2563     if (!mpiOverflow) {
2564       leafSize = (PetscInt) counter;
2565       ierr = PetscMalloc1(leafSize, &leafPoints);CHKERRQ(ierr);
2566       ierr = MPI_Alltoallv(rootPoints, scounts, sdispls, MPIU_2INT, leafPoints, rcounts, rdispls, MPIU_2INT, comm);CHKERRQ(ierr);
2567     }
2568     ierr = PetscFree4(scounts, sdispls, rcounts, rdispls);CHKERRQ(ierr);
2569   }
2570 
2571   /* Communicate overlap using process star forest */
2572   if (processSF || mpiOverflow) {
2573     PetscSF      procSF;
2574     PetscSFNode  *remote;
2575     PetscSection leafSection;
2576 
2577     if (processSF) {
2578       ierr = PetscObjectReference((PetscObject)processSF);CHKERRQ(ierr);
2579       procSF = processSF;
2580     } else {
2581       ierr = PetscMalloc1(size, &remote);CHKERRQ(ierr);
2582       for (r = 0; r < size; ++r) { remote[r].rank  = r; remote[r].index = rank; }
2583       ierr = PetscSFCreate(comm, &procSF);CHKERRQ(ierr);
2584       ierr = PetscSFSetGraph(procSF, size, size, NULL, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
2585     }
2586 
2587     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dm), &leafSection);CHKERRQ(ierr);
2588     ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
2589     ierr = PetscSectionGetStorageSize(leafSection, &leafSize);CHKERRQ(ierr);
2590     ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
2591     ierr = PetscSFDestroy(&procSF);CHKERRQ(ierr);
2592   }
2593 
2594   for (p = 0; p < leafSize; p++) {
2595     ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
2596   }
2597 
2598   ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
2599   ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
2600   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
2601   ierr = PetscFree(rootPoints);CHKERRQ(ierr);
2602   ierr = PetscFree(leafPoints);CHKERRQ(ierr);
2603   PetscFunctionReturn(0);
2604 }
2605 
2606 /*@
2607   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
2608 
2609   Input Parameters:
2610 + dm    - The DM
2611 . label - DMLabel assinging ranks to remote roots
2612 
2613   Output Parameter:
2614 - sf    - The star forest communication context encapsulating the defined mapping
2615 
2616   Note: The incoming label is a receiver mapping of remote points to their parent rank.
2617 
2618   Level: developer
2619 
2620 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
2621 @*/
2622 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
2623 {
2624   PetscMPIInt     rank, size;
2625   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
2626   PetscSFNode    *remotePoints;
2627   IS              remoteRootIS;
2628   const PetscInt *remoteRoots;
2629   PetscErrorCode ierr;
2630 
2631   PetscFunctionBegin;
2632   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
2633   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
2634 
2635   for (numRemote = 0, n = 0; n < size; ++n) {
2636     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
2637     numRemote += numPoints;
2638   }
2639   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
2640   /* Put owned points first */
2641   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
2642   if (numPoints > 0) {
2643     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
2644     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2645     for (p = 0; p < numPoints; p++) {
2646       remotePoints[idx].index = remoteRoots[p];
2647       remotePoints[idx].rank = rank;
2648       idx++;
2649     }
2650     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2651     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
2652   }
2653   /* Now add remote points */
2654   for (n = 0; n < size; ++n) {
2655     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
2656     if (numPoints <= 0 || n == rank) continue;
2657     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
2658     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2659     for (p = 0; p < numPoints; p++) {
2660       remotePoints[idx].index = remoteRoots[p];
2661       remotePoints[idx].rank = n;
2662       idx++;
2663     }
2664     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2665     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
2666   }
2667   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
2668   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
2669   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
2670   PetscFunctionReturn(0);
2671 }
2672 
2673 /* The two functions below are used by DMPlexRebalanceSharedPoints which errors
2674  * when PETSc is built without ParMETIS. To avoid -Wunused-function, we take
2675  * them out in that case. */
2676 #if defined(PETSC_HAVE_PARMETIS)
2677 /*@C
2678 
2679   DMPlexRewriteSF - Rewrites the ownership of the SF of a DM (in place).
2680 
2681   Input parameters:
  + dm                - The DMPlex object.
  . n                 - The number of points.
  . pointsToRewrite   - The points in the SF whose ownership will change.
  . targetOwners      - New owner for each element in pointsToRewrite.
  - degrees           - Degrees of the points in the SF as obtained by PetscSFComputeDegreeBegin/PetscSFComputeDegreeEnd.
2687 
2688   Level: developer
2689 
2690 @*/
static PetscErrorCode DMPlexRewriteSF(DM dm, PetscInt n, PetscInt *pointsToRewrite, PetscInt *targetOwners, const PetscInt *degrees)
{
  PetscInt      ierr, pStart, pEnd, i, j, counter, leafCounter, sumDegrees, nroots, nleafs;
  PetscInt     *cumSumDegrees, *newOwners, *newNumbers, *rankOnLeafs, *locationsOfLeafs, *remoteLocalPointOfLeafs, *points, *leafsNew;
  PetscSFNode  *leafLocationsNew;
  const         PetscSFNode *iremote;
  const         PetscInt *ilocal;
  PetscBool    *isLeaf;
  PetscSF       sf;
  MPI_Comm      comm;
  PetscMPIInt   rank, size;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);

  /* Mark which local chart points are currently leaves of the point SF */
  ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sf, &nroots, &nleafs, &ilocal, &iremote); CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &isLeaf);CHKERRQ(ierr);
  for (i=0; i<pEnd-pStart; i++) {
    isLeaf[i] = PETSC_FALSE;
  }
  /* NOTE(review): assumes ilocal != NULL; an SF with contiguous leaf storage
     returns ilocal == NULL from PetscSFGetGraph — confirm callers never pass one */
  for (i=0; i<nleafs; i++) {
    isLeaf[ilocal[i]-pStart] = PETSC_TRUE;
  }

  /* Prefix sum of root degrees: cumSumDegrees[i] is the offset into the gathered
     leaf data where point i's leaf records start */
  ierr = PetscMalloc1(pEnd-pStart+1, &cumSumDegrees);CHKERRQ(ierr);
  cumSumDegrees[0] = 0;
  for (i=1; i<=pEnd-pStart; i++) {
    cumSumDegrees[i] = cumSumDegrees[i-1] + degrees[i-1];
  }
  sumDegrees = cumSumDegrees[pEnd-pStart];
  /* get the location of my leafs (we have sumDegrees many leafs pointing at our roots) */

  /* Gather, for every root, the ranks of all leaves pointing at it */
  ierr = PetscMalloc1(sumDegrees, &locationsOfLeafs);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &rankOnLeafs);CHKERRQ(ierr);
  for (i=0; i<pEnd-pStart; i++) {
    rankOnLeafs[i] = rank;
  }
  ierr = PetscSFGatherBegin(sf, MPIU_INT, rankOnLeafs, locationsOfLeafs);CHKERRQ(ierr);
  ierr = PetscSFGatherEnd(sf, MPIU_INT, rankOnLeafs, locationsOfLeafs);CHKERRQ(ierr);
  ierr = PetscFree(rankOnLeafs);CHKERRQ(ierr);

  /* get the remote local points of my leaves */
  /* Gather, for every root, the local point numbers its leaves have on their ranks */
  ierr = PetscMalloc1(sumDegrees, &remoteLocalPointOfLeafs);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &points);CHKERRQ(ierr);
  for (i=0; i<pEnd-pStart; i++) {
    points[i] = pStart+i;
  }
  ierr = PetscSFGatherBegin(sf, MPIU_INT, points, remoteLocalPointOfLeafs);CHKERRQ(ierr);
  ierr = PetscSFGatherEnd(sf, MPIU_INT, points, remoteLocalPointOfLeafs);CHKERRQ(ierr);
  ierr = PetscFree(points);CHKERRQ(ierr);
  /* Figure out the new owners of the vertices that are up for grabs and their numbers on the new owners */
  ierr = PetscMalloc1(pEnd-pStart, &newOwners);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &newNumbers);CHKERRQ(ierr);
  for (i=0; i<pEnd-pStart; i++) {
    /* -1 means "ownership unchanged" below */
    newOwners[i] = -1;
    newNumbers[i] = -1;
  }
  {
    PetscInt oldNumber, newNumber, oldOwner, newOwner;
    for (i=0; i<n; i++) {
      oldNumber = pointsToRewrite[i];
      newNumber = -1;
      oldOwner = rank;
      newOwner = targetOwners[i];
      if (oldOwner == newOwner) {
        newNumber = oldNumber;
      } else {
        /* Find the new owner among the leaves of this root to learn the point's
           local number on that rank */
        for (j=0; j<degrees[oldNumber]; j++) {
          if (locationsOfLeafs[cumSumDegrees[oldNumber]+j] == newOwner) {
            newNumber = remoteLocalPointOfLeafs[cumSumDegrees[oldNumber]+j];
            break;
          }
        }
      }
      if (newNumber == -1) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Couldn't find the new owner of vertex.");

      newOwners[oldNumber] = newOwner;
      newNumbers[oldNumber] = newNumber;
    }
  }
  ierr = PetscFree(cumSumDegrees);CHKERRQ(ierr);
  ierr = PetscFree(locationsOfLeafs);CHKERRQ(ierr);
  ierr = PetscFree(remoteLocalPointOfLeafs);CHKERRQ(ierr);

  /* Broadcast the decisions from the roots so every rank holding a copy agrees */
  ierr = PetscSFBcastBegin(sf, MPIU_INT, newOwners, newOwners);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf, MPIU_INT, newOwners, newOwners);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf, MPIU_INT, newNumbers, newNumbers);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf, MPIU_INT, newNumbers, newNumbers);CHKERRQ(ierr);

  /* Now count how many leafs we have on each processor. */
  /* A point becomes a leaf here if it got a new owner other than us, or if its
     ownership is untouched and it was a leaf before */
  leafCounter=0;
  for (i=0; i<pEnd-pStart; i++) {
    if (newOwners[i] >= 0) {
      if (newOwners[i] != rank) {
        leafCounter++;
      }
    } else {
      if (isLeaf[i]) {
        leafCounter++;
      }
    }
  }

  /* Now set up the new sf by creating the leaf arrays */
  ierr = PetscMalloc1(leafCounter, &leafsNew);CHKERRQ(ierr);
  ierr = PetscMalloc1(leafCounter, &leafLocationsNew);CHKERRQ(ierr);

  leafCounter = 0;
  counter = 0;  /* index into iremote, advanced once per pre-existing leaf */
  for (i=0; i<pEnd-pStart; i++) {
    if (newOwners[i] >= 0) {
      if (newOwners[i] != rank) {
        /* Rewritten point: leaf targets the new owner under its new local number */
        leafsNew[leafCounter] = i;
        leafLocationsNew[leafCounter].rank = newOwners[i];
        leafLocationsNew[leafCounter].index = newNumbers[i];
        leafCounter++;
      }
    } else {
      if (isLeaf[i]) {
        /* Untouched leaf: keep its previous remote target */
        leafsNew[leafCounter] = i;
        leafLocationsNew[leafCounter].rank = iremote[counter].rank;
        leafLocationsNew[leafCounter].index = iremote[counter].index;
        leafCounter++;
      }
    }
    if (isLeaf[i]) {
      counter++;
    }
  }

  /* Replace the graph of the point SF in place; the SF takes ownership of both arrays */
  ierr = PetscSFSetGraph(sf, nroots, leafCounter, leafsNew, PETSC_OWN_POINTER, leafLocationsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscFree(newOwners);CHKERRQ(ierr);
  ierr = PetscFree(newNumbers);CHKERRQ(ierr);
  ierr = PetscFree(isLeaf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2831 
2832 static PetscErrorCode DMPlexViewDistribution(MPI_Comm comm, PetscInt n, PetscInt skip, PetscInt *vtxwgt, PetscInt *part, PetscViewer viewer)
2833 {
2834   PetscInt *distribution, min, max, sum, i, ierr;
2835   PetscMPIInt rank, size;
2836   PetscFunctionBegin;
2837   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2838   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2839   ierr = PetscCalloc1(size, &distribution);CHKERRQ(ierr);
2840   for (i=0; i<n; i++) {
2841     if (part) distribution[part[i]] += vtxwgt[skip*i];
2842     else distribution[rank] += vtxwgt[skip*i];
2843   }
2844   ierr = MPI_Allreduce(MPI_IN_PLACE, distribution, size, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
2845   min = distribution[0];
2846   max = distribution[0];
2847   sum = distribution[0];
2848   for (i=1; i<size; i++) {
2849     if (distribution[i]<min) min=distribution[i];
2850     if (distribution[i]>max) max=distribution[i];
2851     sum += distribution[i];
2852   }
2853   ierr = PetscViewerASCIIPrintf(viewer, "Min: %D, Avg: %D, Max: %D, Balance: %f\n", min, sum/size, max, (max*1.*size)/sum);CHKERRQ(ierr);
2854   ierr = PetscFree(distribution);CHKERRQ(ierr);
2855   PetscFunctionReturn(0);
2856 }
2857 
2858 #endif
2859 
2860 /*@
2861   DMPlexRebalanceSharedPoints - Redistribute points in the plex that are shared in order to achieve better balancing. This routine updates the PointSF of the DM inplace.
2862 
2863   Input parameters:
  + dm               - The DMPlex object.
  . entityDepth      - depth of the entity to balance (0 -> balance vertices).
  . useInitialGuess  - whether to use the current distribution as initial guess (only used by ParMETIS).
  - parallel         - whether to use ParMETIS and do the partition in parallel or whether to gather the graph onto a single process and use METIS.

  Output parameters:
  . success          - whether the graph partitioning was successful or not. If not, try useInitialGuess=True and parallel=True.
2871 
2872   Level: user
2873 
2874 @*/
2875 
PetscErrorCode DMPlexRebalanceSharedPoints(DM dm, PetscInt entityDepth, PetscBool useInitialGuess, PetscBool parallel, PetscBool *success)
{
#if defined(PETSC_HAVE_PARMETIS)
  PetscSF     sf;
  /* ierr is deliberately PetscInt (not PetscErrorCode): it also receives METIS/ParMETIS
   * status codes, which are compared against METIS_OK below. */
  PetscInt    ierr, i, j, idx, jdx;
  PetscInt    eBegin, eEnd, nroots, nleafs, pStart, pEnd;
  const       PetscInt *degrees, *ilocal;
  const       PetscSFNode *iremote;
  PetscBool   *toBalance, *isLeaf, *isExclusivelyOwned, *isNonExclusivelyOwned;
  PetscInt    numExclusivelyOwned, numNonExclusivelyOwned;
  PetscMPIInt rank, size;
  PetscInt    *globalNumbersOfLocalOwnedVertices, *leafGlobalNumbers;
  const       PetscInt *cumSumVertices;
  PetscInt    offset, counter;
  PetscInt    lenadjncy;
  PetscInt    *xadj, *adjncy, *vtxwgt;
  PetscInt    lenxadj;
  PetscInt    *adjwgt = NULL;
  PetscInt    *part, *options;
  PetscInt    nparts, wgtflag, numflag, ncon, edgecut;
  real_t      *ubvec;
  PetscInt    *firstVertices, *renumbering;
  PetscInt    failed, failedGlobal;
  MPI_Comm    comm;
  Mat         A, Apre;
  const char *prefix = NULL;
  PetscViewer       viewer;
  PetscViewerFormat format;
  PetscLayout layout;

  PetscFunctionBegin;
  if (success) *success = PETSC_FALSE;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  /* Nothing shared on a single process, so nothing to rebalance. */
  if (size==1) PetscFunctionReturn(0);

  ierr = PetscLogEventBegin(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);

  ierr = PetscOptionsGetViewer(comm,((PetscObject)dm)->options, prefix,"-dm_rebalance_partition_view",&viewer,&format,NULL);CHKERRQ(ierr);
  if (viewer) {
    ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr);
  }

  /* Figure out all points in the plex that we are interested in balancing. */
  ierr = DMPlexGetDepthStratum(dm, entityDepth, &eBegin, &eEnd);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &toBalance);CHKERRQ(ierr);

  for (i=0; i<pEnd-pStart; i++) {
    /* NOTE(review): the chart-local index i corresponds to point i+pStart, so the
     * stratum test would be expected to read i+pStart>=eBegin && i+pStart<eEnd.
     * The i-pStart here is only equivalent when pStart==0 (the usual Plex chart
     * origin) — confirm against DMPlexGetChart semantics before relying on
     * non-zero chart starts. */
    toBalance[i] = (PetscBool)(i-pStart>=eBegin && i-pStart<eEnd);
  }

  /* There are three types of points:
   * exclusivelyOwned: points that are owned by this process and only seen by this process
   * nonExclusivelyOwned: points that are owned by this process but seen by at least another process
   * leaf: a point that is seen by this process but owned by a different process
   */
  ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sf, &nroots, &nleafs, &ilocal, &iremote); CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &isLeaf);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &isNonExclusivelyOwned);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &isExclusivelyOwned);CHKERRQ(ierr);
  for (i=0; i<pEnd-pStart; i++) {
    isNonExclusivelyOwned[i] = PETSC_FALSE;
    isExclusivelyOwned[i] = PETSC_FALSE;
    isLeaf[i] = PETSC_FALSE;
  }

  /* start by marking all the leafs */
  for (i=0; i<nleafs; i++) {
    isLeaf[ilocal[i]-pStart] = PETSC_TRUE;
  }

  /* for an owned point, we can figure out whether another processor sees it or
   * not by calculating its degree */
  ierr = PetscSFComputeDegreeBegin(sf, &degrees);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeEnd(sf, &degrees);CHKERRQ(ierr);

  numExclusivelyOwned = 0;
  numNonExclusivelyOwned = 0;
  for (i=0; i<pEnd-pStart; i++) {
    if (toBalance[i]) {
      if (degrees[i] > 0) {
        /* degree > 0: some other rank has this root as a leaf, i.e. it is shared */
        isNonExclusivelyOwned[i] = PETSC_TRUE;
        numNonExclusivelyOwned += 1;
      } else {
        if (!isLeaf[i]) {
          /* not shared and not someone else's point: exclusively ours */
          isExclusivelyOwned[i] = PETSC_TRUE;
          numExclusivelyOwned += 1;
        }
      }
    }
  }

  /* We are going to build a graph with one vertex per core representing the
   * exclusively owned points and then one vertex per nonExclusively owned
   * point. */

  /* The layout gives every rank a contiguous range of graph-vertex numbers:
   * position cumSumVertices[rank] is this rank's "rank vertex", followed by one
   * vertex per non-exclusively owned point. */
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, 1 + numNonExclusivelyOwned);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &cumSumVertices);CHKERRQ(ierr);

  ierr = PetscMalloc1(pEnd-pStart, &globalNumbersOfLocalOwnedVertices);CHKERRQ(ierr);
  offset = cumSumVertices[rank];
  counter = 0;
  for (i=0; i<pEnd-pStart; i++) {
    if (toBalance[i]) {
      if (degrees[i] > 0) {
        /* +1 skips this rank's own rank vertex at position 'offset' */
        globalNumbersOfLocalOwnedVertices[i] = counter + 1 + offset;
        counter++;
      }
    }
  }

  /* send the global numbers of vertices I own to the leafs so that they know to connect to it */
  ierr = PetscMalloc1(pEnd-pStart, &leafGlobalNumbers);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf, MPIU_INT, globalNumbersOfLocalOwnedVertices, leafGlobalNumbers);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf, MPIU_INT, globalNumbersOfLocalOwnedVertices, leafGlobalNumbers);CHKERRQ(ierr);

  /* Now start building the data structure for ParMETIS */

  /* First pass through a MATPREALLOCATOR to record the nonzero pattern only;
   * the identical insertion loop below then fills the real MATMPIAIJ. */
  ierr = MatCreate(comm, &Apre);CHKERRQ(ierr);
  ierr = MatSetType(Apre, MATPREALLOCATOR);CHKERRQ(ierr);
  ierr = MatSetSizes(Apre, 1+numNonExclusivelyOwned, 1+numNonExclusivelyOwned, cumSumVertices[size], cumSumVertices[size]);CHKERRQ(ierr);
  ierr = MatSetUp(Apre);CHKERRQ(ierr);

  for (i=0; i<pEnd-pStart; i++) {
    if (toBalance[i]) {
      /* connect every shared point (owned or leaf) symmetrically to this
       * rank's rank vertex */
      idx = cumSumVertices[rank];
      if (isNonExclusivelyOwned[i]) jdx = globalNumbersOfLocalOwnedVertices[i];
      else if (isLeaf[i]) jdx = leafGlobalNumbers[i];
      else continue;
      ierr = MatSetValue(Apre, idx, jdx, 1, INSERT_VALUES);CHKERRQ(ierr);
      ierr = MatSetValue(Apre, jdx, idx, 1, INSERT_VALUES);CHKERRQ(ierr);
    }
  }

  ierr = MatAssemblyBegin(Apre, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(Apre, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  ierr = MatCreate(comm, &A);CHKERRQ(ierr);
  ierr = MatSetType(A, MATMPIAIJ);CHKERRQ(ierr);
  ierr = MatSetSizes(A, 1+numNonExclusivelyOwned, 1+numNonExclusivelyOwned, cumSumVertices[size], cumSumVertices[size]);CHKERRQ(ierr);
  ierr = MatPreallocatorPreallocate(Apre, PETSC_FALSE, A);CHKERRQ(ierr);
  ierr = MatDestroy(&Apre);CHKERRQ(ierr);

  /* Second pass: same insertions as above, now into the preallocated matrix. */
  for (i=0; i<pEnd-pStart; i++) {
    if (toBalance[i]) {
      idx = cumSumVertices[rank];
      if (isNonExclusivelyOwned[i]) jdx = globalNumbersOfLocalOwnedVertices[i];
      else if (isLeaf[i]) jdx = leafGlobalNumbers[i];
      else continue;
      ierr = MatSetValue(A, idx, jdx, 1, INSERT_VALUES);CHKERRQ(ierr);
      ierr = MatSetValue(A, jdx, idx, 1, INSERT_VALUES);CHKERRQ(ierr);
    }
  }

  ierr = MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = PetscFree(leafGlobalNumbers);CHKERRQ(ierr);
  ierr = PetscFree(globalNumbersOfLocalOwnedVertices);CHKERRQ(ierr);

  nparts = size;
  wgtflag = 2; /* weights on vertices only, no edge weights (adjwgt stays NULL) */
  numflag = 0; /* C-style 0-based numbering */
  ncon = 2;    /* two balance constraints; see the vtxwgt setup below */
  real_t *tpwgts;
  ierr = PetscMalloc1(ncon * nparts, &tpwgts);CHKERRQ(ierr);
  /* uniform target weight 1/nparts for every (partition, constraint) pair */
  for (i=0; i<ncon*nparts; i++) {
    tpwgts[i] = 1./(nparts);
  }

  /* allow 1% load imbalance per constraint */
  ierr = PetscMalloc1(ncon, &ubvec);CHKERRQ(ierr);
  ubvec[0] = 1.01;
  ubvec[1] = 1.01;
  /* Count nonzeros in our rows of A to size the CSR adjacency array. */
  lenadjncy = 0;
  for (i=0; i<1+numNonExclusivelyOwned; i++) {
    PetscInt temp=0;
    ierr = MatGetRow(A, cumSumVertices[rank] + i, &temp, NULL, NULL);CHKERRQ(ierr);
    lenadjncy += temp;
    ierr = MatRestoreRow(A, cumSumVertices[rank] + i, &temp, NULL, NULL);CHKERRQ(ierr);
  }
  ierr = PetscMalloc1(lenadjncy, &adjncy);CHKERRQ(ierr);
  lenxadj = 2 + numNonExclusivelyOwned;
  ierr = PetscMalloc1(lenxadj, &xadj);CHKERRQ(ierr);
  /* Flatten our block of rows of A into CSR (xadj/adjncy) form for (Par)METIS. */
  xadj[0] = 0;
  counter = 0;
  for (i=0; i<1+numNonExclusivelyOwned; i++) {
    PetscInt        temp=0;
    const PetscInt *cols;
    ierr = MatGetRow(A, cumSumVertices[rank] + i, &temp, &cols, NULL);CHKERRQ(ierr);
    ierr = PetscMemcpy(&adjncy[counter], cols, temp*sizeof(PetscInt));CHKERRQ(ierr);
    counter += temp;
    xadj[i+1] = counter;
    ierr = MatRestoreRow(A, cumSumVertices[rank] + i, &temp, &cols, NULL);CHKERRQ(ierr);
  }

  ierr = PetscMalloc1(cumSumVertices[rank+1]-cumSumVertices[rank], &part);CHKERRQ(ierr);
  ierr = PetscMalloc1(ncon*(1 + numNonExclusivelyOwned), &vtxwgt);CHKERRQ(ierr);
  /* Constraint 0 balances the exclusively owned points (all lumped into the
   * rank vertex); constraint 1 gives each rank vertex weight 1 so partitions
   * end up with one rank vertex each. */
  vtxwgt[0] = numExclusivelyOwned;
  if (ncon>1) vtxwgt[1] = 1;
  for (i=0; i<numNonExclusivelyOwned; i++) {
    vtxwgt[ncon*(i+1)] = 1;
    if (ncon>1) vtxwgt[ncon*(i+1)+1] = 0;
  }

  if (viewer) {
    ierr = PetscViewerASCIIPrintf(viewer, "Attempt rebalancing of shared points of depth %D on interface of mesh distribution.\n", entityDepth);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer, "Size of generated auxiliary graph: %D\n", cumSumVertices[size]);CHKERRQ(ierr);
  }
  if (parallel) {
    ierr = PetscMalloc1(4, &options);CHKERRQ(ierr);
    options[0] = 1;
    options[1] = 0; /* Verbosity */
    options[2] = 0; /* Seed */
    options[3] = PARMETIS_PSR_COUPLED; /* Coupling: sub-domains and processors coincide, so the initial distribution in 'part' is usable */
    if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using ParMETIS to partition graph.\n");CHKERRQ(ierr); }
    if (useInitialGuess) {
      if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using current distribution of points as initial guess.\n");CHKERRQ(ierr); }
      PetscStackPush("ParMETIS_V3_RefineKway");
      ierr = ParMETIS_V3_RefineKway((PetscInt*)cumSumVertices, xadj, adjncy, vtxwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgecut, part, &comm);
      if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_RefineKway()");
      PetscStackPop;
    } else {
      PetscStackPush("ParMETIS_V3_PartKway");
      ierr = ParMETIS_V3_PartKway((PetscInt*)cumSumVertices, xadj, adjncy, vtxwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgecut, part, &comm);
      PetscStackPop;
      if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
    }
    ierr = PetscFree(options);CHKERRQ(ierr);
  } else {
    /* Serial fallback: gather the whole graph onto every rank, partition with
     * METIS on rank 0, then scatter the result. */
    if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using METIS to partition graph.\n");CHKERRQ(ierr); }
    Mat As;
    PetscInt numRows;
    PetscInt *partGlobal;
    ierr = MatCreateRedundantMatrix(A, size, MPI_COMM_NULL, MAT_INITIAL_MATRIX, &As);CHKERRQ(ierr);

    PetscInt *numExclusivelyOwnedAll;
    ierr = PetscMalloc1(size, &numExclusivelyOwnedAll);CHKERRQ(ierr);
    numExclusivelyOwnedAll[rank] = numExclusivelyOwned;
    /* in-place allgather: each rank fills its own slot above */
    ierr = MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,numExclusivelyOwnedAll,1,MPIU_INT,comm);CHKERRQ(ierr);

    ierr = MatGetSize(As, &numRows, NULL);CHKERRQ(ierr);
    ierr = PetscMalloc1(numRows, &partGlobal);CHKERRQ(ierr);
    if (!rank) {
      PetscInt *adjncy_g, *xadj_g, *vtxwgt_g;
      lenadjncy = 0;

      for (i=0; i<numRows; i++) {
        PetscInt temp=0;
        ierr = MatGetRow(As, i, &temp, NULL, NULL);CHKERRQ(ierr);
        lenadjncy += temp;
        ierr = MatRestoreRow(As, i, &temp, NULL, NULL);CHKERRQ(ierr);
      }
      ierr = PetscMalloc1(lenadjncy, &adjncy_g);CHKERRQ(ierr);
      lenxadj = 1 + numRows;
      ierr = PetscMalloc1(lenxadj, &xadj_g);CHKERRQ(ierr);
      xadj_g[0] = 0;
      counter = 0;
      for (i=0; i<numRows; i++) {
        PetscInt        temp=0;
        const PetscInt *cols;
        ierr = MatGetRow(As, i, &temp, &cols, NULL);CHKERRQ(ierr);
        ierr = PetscMemcpy(&adjncy_g[counter], cols, temp*sizeof(PetscInt));CHKERRQ(ierr);
        counter += temp;
        xadj_g[i+1] = counter;
        ierr = MatRestoreRow(As, i, &temp, &cols, NULL);CHKERRQ(ierr);
      }
      /* NOTE(review): literal 2 here (and in the '2*j+1' below) is tied to
       * ncon==2 while sibling lines use ncon — confirm they stay in sync if
       * ncon ever changes. */
      ierr = PetscMalloc1(2*numRows, &vtxwgt_g);CHKERRQ(ierr);
      /* global analogue of the local vtxwgt setup: one rank vertex followed by
       * that rank's shared-point vertices, for each rank */
      for (i=0; i<size; i++){
        vtxwgt_g[ncon*cumSumVertices[i]] = numExclusivelyOwnedAll[i];
        if (ncon>1) vtxwgt_g[ncon*cumSumVertices[i]+1] = 1;
        for (j=cumSumVertices[i]+1; j<cumSumVertices[i+1]; j++) {
          vtxwgt_g[ncon*j] = 1;
          if (ncon>1) vtxwgt_g[2*j+1] = 0;
        }
      }
      ierr = PetscMalloc1(64, &options);CHKERRQ(ierr);
      ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
      if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
      options[METIS_OPTION_CONTIG] = 1; /* request contiguous partitions */
      PetscStackPush("METIS_PartGraphKway");
      ierr = METIS_PartGraphKway(&numRows, &ncon, xadj_g, adjncy_g, vtxwgt_g, NULL, NULL, &nparts, tpwgts, ubvec, options, &edgecut, partGlobal);
      PetscStackPop;
      if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
      ierr = PetscFree(options);CHKERRQ(ierr);
      ierr = PetscFree(xadj_g);CHKERRQ(ierr);
      ierr = PetscFree(adjncy_g);CHKERRQ(ierr);
      ierr = PetscFree(vtxwgt_g);CHKERRQ(ierr);
    }
    ierr = PetscFree(numExclusivelyOwnedAll);CHKERRQ(ierr);

    /* Now scatter the parts array. */
    {
      /* MPI_Scatterv needs PetscMPIInt counts/displacements, hence the casts */
      PetscMPIInt *counts, *mpiCumSumVertices;
      ierr = PetscMalloc1(size, &counts);CHKERRQ(ierr);
      ierr = PetscMalloc1(size+1, &mpiCumSumVertices);CHKERRQ(ierr);
      for(i=0; i<size; i++) {
        ierr = PetscMPIIntCast(cumSumVertices[i+1] - cumSumVertices[i], &(counts[i]));CHKERRQ(ierr);
      }
      for(i=0; i<=size; i++) {
        ierr = PetscMPIIntCast(cumSumVertices[i], &(mpiCumSumVertices[i]));CHKERRQ(ierr);
      }
      ierr = MPI_Scatterv(partGlobal, counts, mpiCumSumVertices, MPIU_INT, part, counts[rank], MPIU_INT, 0, comm);CHKERRQ(ierr);
      ierr = PetscFree(counts);CHKERRQ(ierr);
      ierr = PetscFree(mpiCumSumVertices);CHKERRQ(ierr);
    }

    ierr = PetscFree(partGlobal);CHKERRQ(ierr);
    ierr = MatDestroy(&As);CHKERRQ(ierr);
  }

  ierr = MatDestroy(&A);CHKERRQ(ierr);
  ierr = PetscFree(ubvec);CHKERRQ(ierr);
  ierr = PetscFree(tpwgts);CHKERRQ(ierr);

  /* Now rename the result so that the vertex resembling the exclusively owned points stays on the same rank */

  ierr = PetscMalloc1(size, &firstVertices);CHKERRQ(ierr);
  ierr = PetscMalloc1(size, &renumbering);CHKERRQ(ierr);
  /* part[0] is the partition assigned to this rank's rank vertex; relabel that
   * partition to this rank so exclusively owned points do not move */
  firstVertices[rank] = part[0];
  ierr = MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,firstVertices,1,MPIU_INT,comm);CHKERRQ(ierr);
  for (i=0; i<size; i++) {
    renumbering[firstVertices[i]] = i;
  }
  for (i=0; i<cumSumVertices[rank+1]-cumSumVertices[rank]; i++) {
    part[i] = renumbering[part[i]];
  }
  /* Check if the renumbering worked (this can fail when ParMETIS gives fewer partitions than there are processes) */
  failed = (PetscInt)(part[0] != rank);
  ierr = MPI_Allreduce(&failed, &failedGlobal, 1, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);

  ierr = PetscFree(firstVertices);CHKERRQ(ierr);
  ierr = PetscFree(renumbering);CHKERRQ(ierr);

  if (failedGlobal > 0) {
    /* give up: free everything and return with *success left PETSC_FALSE */
    ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
    ierr = PetscFree(xadj);CHKERRQ(ierr);
    ierr = PetscFree(adjncy);CHKERRQ(ierr);
    ierr = PetscFree(vtxwgt);CHKERRQ(ierr);
    ierr = PetscFree(toBalance);CHKERRQ(ierr);
    ierr = PetscFree(isLeaf);CHKERRQ(ierr);
    ierr = PetscFree(isNonExclusivelyOwned);CHKERRQ(ierr);
    ierr = PetscFree(isExclusivelyOwned);CHKERRQ(ierr);
    ierr = PetscFree(part);CHKERRQ(ierr);
    if (viewer) {
      ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
      ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
    }
    ierr = PetscLogEventEnd(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /*Let's check how well we did distributing points*/
  if (viewer) {
    ierr = PetscViewerASCIIPrintf(viewer, "Comparing number of owned entities of depth %D on each process before rebalancing, after rebalancing, and after consistency checks.\n", entityDepth);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer, "Initial.     ");CHKERRQ(ierr);
    ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, NULL, viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer, "Rebalanced.  ");CHKERRQ(ierr);
    ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, part, viewer);CHKERRQ(ierr);
  }

  /* Now check that every vertex is owned by a process that it is actually connected to. */
  for (i=1; i<=numNonExclusivelyOwned; i++) {
    PetscInt loc = 0;
    /* cumSumVertices[part[i]] is the rank vertex of the proposed new owner;
     * it must appear among the neighbours of graph vertex i */
    ierr = PetscFindInt(cumSumVertices[part[i]], xadj[i+1]-xadj[i], &adjncy[xadj[i]], &loc);CHKERRQ(ierr);
    /* If not, then just set the owner to the original owner (hopefully a rare event, it means that a vertex has been isolated) */
    if (loc<0) {
      part[i] = rank;
    }
  }

  /* Let's see how significant the influences of the previous fixing up step was.*/
  if (viewer) {
    ierr = PetscViewerASCIIPrintf(viewer, "After.       ");CHKERRQ(ierr);
    ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, part, viewer);CHKERRQ(ierr);
  }

  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  ierr = PetscFree(xadj);CHKERRQ(ierr);
  ierr = PetscFree(adjncy);CHKERRQ(ierr);
  ierr = PetscFree(vtxwgt);CHKERRQ(ierr);

  /* Almost done, now rewrite the SF to reflect the new ownership. */
  {
    PetscInt *pointsToRewrite;
    ierr = PetscMalloc1(numNonExclusivelyOwned, &pointsToRewrite);CHKERRQ(ierr);
    counter = 0;
    for(i=0; i<pEnd-pStart; i++) {
      if (toBalance[i]) {
        if (isNonExclusivelyOwned[i]) {
          pointsToRewrite[counter] = i + pStart;
          counter++;
        }
      }
    }
    /* part+1 skips the rank-vertex entry at part[0] */
    ierr = DMPlexRewriteSF(dm, numNonExclusivelyOwned, pointsToRewrite, part+1, degrees);CHKERRQ(ierr);
    ierr = PetscFree(pointsToRewrite);CHKERRQ(ierr);
  }

  ierr = PetscFree(toBalance);CHKERRQ(ierr);
  ierr = PetscFree(isLeaf);CHKERRQ(ierr);
  ierr = PetscFree(isNonExclusivelyOwned);CHKERRQ(ierr);
  ierr = PetscFree(isExclusivelyOwned);CHKERRQ(ierr);
  ierr = PetscFree(part);CHKERRQ(ierr);
  if (viewer) {
    ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
    ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  }
  if (success) *success = PETSC_TRUE;
  ierr = PetscLogEventEnd(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
#else
  SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
#endif
  PetscFunctionReturn(0);
}
3294