xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 60dd46ca9875962950ab5c8a8f4d94fdb4c0f129)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/hashseti.h>
3 
4 PetscClassId PETSCPARTITIONER_CLASSID = 0;
5 
6 PetscFunctionList PetscPartitionerList              = NULL;
7 PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
8 
9 PetscBool ChacoPartitionercite = PETSC_FALSE;
10 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
11                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
12                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
13                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
14                                "  isbn      = {0-89791-816-9},\n"
15                                "  pages     = {28},\n"
16                                "  doi       = {https://doi.acm.org/10.1145/224170.224228},\n"
17                                "  publisher = {ACM Press},\n"
18                                "  address   = {New York},\n"
19                                "  year      = {1995}\n}\n";
20 
21 PetscBool ParMetisPartitionercite = PETSC_FALSE;
22 const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
23                                "  author  = {George Karypis and Vipin Kumar},\n"
24                                "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
25                                "  journal = {Journal of Parallel and Distributed Computing},\n"
26                                "  volume  = {48},\n"
27                                "  pages   = {71--85},\n"
28                                "  year    = {1998}\n}\n";
29 
30 PETSC_STATIC_INLINE PetscInt DMPlex_GlobalID(PetscInt point) { return point >= 0 ? point : -(point+1); }
31 
/* Build the cell-cell adjacency (CSR) graph used by the partitioner directly from the
   Plex, using the point SF to discover neighbor cells across process boundaries.
   Columns of the CSR graph are expressed in a parallel-consistent global cell
   numbering; only locally owned cells contribute rows. On output *offsets and
   *adjacency are PetscMalloc'd arrays owned by the caller. */
static PetscErrorCode DMPlexCreatePartitionerGraph_Native(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
{
  PetscInt       dim, depth, p, pStart, pEnd, a, adjSize, idx, size;
  PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
  IS             cellNumbering;
  const PetscInt *cellNum;
  PetscBool      useCone, useClosure;
  PetscSection   section;
  PetscSegBuffer adjBuffer;
  PetscSF        sfPoint;
  PetscInt       *adjCells = NULL, *remoteCells = NULL;
  const PetscInt *local;
  PetscInt       nroots, nleaves, l;
  PetscMPIInt    rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  if (dim != depth) {
    /* We do not handle the uninterpolated case here */
    ierr = DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);CHKERRQ(ierr);
    /* DMPlexCreateNeighborCSR does not make a numbering */
    if (globalNumbering) {ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);CHKERRQ(ierr);}
    /* Different behavior for empty graphs */
    if (!*numVertices) {
      ierr = PetscMalloc1(1, offsets);CHKERRQ(ierr);
      (*offsets)[0] = 0;
    }
    /* Broken in parallel */
    if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported");
    PetscFunctionReturn(0);
  }
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
  /* Build adjacency graph via a section/segbuffer */
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
  /* Always use FVM adjacency to create partitioner graph */
  ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
  ierr = DMSetBasicAdjacency(dm, PETSC_TRUE, PETSC_FALSE);CHKERRQ(ierr);
  ierr = DMPlexCreateNumbering_Internal(dm, pStart, pEnd, 0, NULL, sfPoint, &cellNumbering);CHKERRQ(ierr);
  if (globalNumbering) {
    /* Hand the caller a reference to the same numbering used for the columns below */
    ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
    *globalNumbering = cellNumbering;
  }
  ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  /* For all boundary faces (including faces adjacent to a ghost cell), record the local cell in adjCells
     Broadcast adjCells to remoteCells (to get cells from roots) and Reduce adjCells to remoteCells (to get cells from leaves)
   */
  ierr = PetscSFGetGraph(sfPoint, &nroots, &nleaves, &local, NULL);CHKERRQ(ierr);
  if (nroots >= 0) {
    PetscInt fStart, fEnd, f;

    ierr = PetscCalloc2(nroots, &adjCells, nroots, &remoteCells);CHKERRQ(ierr);
    ierr = DMPlexGetHeightStratum(dm, height+1, &fStart, &fEnd);CHKERRQ(ierr);
    /* -3 marks points with no recorded neighbor cell */
    for (l = 0; l < nroots; ++l) adjCells[l] = -3;
    for (f = fStart; f < fEnd; ++f) {
      const PetscInt *support;
      PetscInt        supportSize;

      ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
      ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
      /* A face with a single support cell is on the (process) boundary: record that cell */
      if (supportSize == 1) adjCells[f] = DMPlex_GlobalID(cellNum[support[0]]);
      else if (supportSize == 2) {
        /* If one support cell is an SF leaf (remotely owned), record the other cell on the face */
        ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = DMPlex_GlobalID(cellNum[support[1]]);
        ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = DMPlex_GlobalID(cellNum[support[0]]);
      }
      /* Handle non-conforming meshes */
      if (supportSize > 2) {
        PetscInt        numChildren, i;
        const PetscInt *children;

        ierr = DMPlexGetTreeChildren(dm, f, &numChildren, &children);CHKERRQ(ierr);
        for (i = 0; i < numChildren; ++i) {
          const PetscInt child = children[i];
          if (fStart <= child && child < fEnd) {
            /* Same recording logic as above, applied to each child face in the stratum */
            ierr = DMPlexGetSupport(dm, child, &support);CHKERRQ(ierr);
            ierr = DMPlexGetSupportSize(dm, child, &supportSize);CHKERRQ(ierr);
            if (supportSize == 1) adjCells[child] = DMPlex_GlobalID(cellNum[support[0]]);
            else if (supportSize == 2) {
              ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr);
              if (p >= 0) adjCells[child] = DMPlex_GlobalID(cellNum[support[1]]);
              ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr);
              if (p >= 0) adjCells[child] = DMPlex_GlobalID(cellNum[support[0]]);
            }
          }
        }
      }
    }
    for (l = 0; l < nroots; ++l) remoteCells[l] = -1;
    /* Bcast pushes recorded cells from roots to leaves; the MPI_MAX Reduce then pulls
       leaf values back onto roots, so shared faces learn the cell on the other side */
    ierr = PetscSFBcastBegin(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
  }
  /* Combine local and global adjacencies */
  for (*numVertices = 0, p = pStart; p < pEnd; p++) {
    /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    /* Add remote cells */
    if (remoteCells) {
      const PetscInt gp = DMPlex_GlobalID(cellNum[p]);
      PetscInt       coneSize, numChildren, c, i;
      const PetscInt *cone, *children;

      ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
      ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
      for (c = 0; c < coneSize; ++c) {
        const PetscInt point = cone[c];
        /* Record a remote neighbor only when it is valid and not this cell itself */
        if (remoteCells[point] >= 0 && remoteCells[point] != gp) {
          PetscInt *PETSC_RESTRICT pBuf;
          ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
          ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
          *pBuf = remoteCells[point];
        }
        /* Handle non-conforming meshes */
        ierr = DMPlexGetTreeChildren(dm, point, &numChildren, &children);CHKERRQ(ierr);
        for (i = 0; i < numChildren; ++i) {
          const PetscInt child = children[i];
          if (remoteCells[child] >= 0 && remoteCells[child] != gp) {
            PetscInt *PETSC_RESTRICT pBuf;
            ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
            ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
            *pBuf = remoteCells[child];
          }
        }
      }
    }
    /* Add local cells */
    adjSize = PETSC_DETERMINE;
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      const PetscInt point = adj[a];
      /* Keep only other cells in the same stratum; columns use the global numbering */
      if (point != p && pStart <= point && point < pEnd) {
        PetscInt *PETSC_RESTRICT pBuf;
        ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
        ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
        *pBuf = DMPlex_GlobalID(cellNum[point]);
      }
    }
    (*numVertices)++;
  }
  ierr = PetscFree(adj);CHKERRQ(ierr);
  ierr = PetscFree2(adjCells, remoteCells);CHKERRQ(ierr);
  /* Restore the user's adjacency definition */
  ierr = DMSetBasicAdjacency(dm, useCone, useClosure);CHKERRQ(ierr);

  /* Derive CSR graph from section/segbuffer */
  ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
  for (idx = 0, p = pStart; p < pEnd; p++) {
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
  }
  vOffsets[*numVertices] = size;
  ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);

  if (nroots >= 0) {
    /* Filter out duplicate edges using section/segbuffer */
    ierr = PetscSectionReset(section);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(section, 0, *numVertices);CHKERRQ(ierr);
    for (p = 0; p < *numVertices; p++) {
      PetscInt start = vOffsets[p], end = vOffsets[p+1];
      PetscInt numEdges = end-start, *PETSC_RESTRICT edges;
      /* Sort/unique each row in place, then stage the compacted row in the segbuffer */
      ierr = PetscSortRemoveDupsInt(&numEdges, &graph[start]);CHKERRQ(ierr);
      ierr = PetscSectionSetDof(section, p, numEdges);CHKERRQ(ierr);
      ierr = PetscSegBufferGetInts(adjBuffer, numEdges, &edges);CHKERRQ(ierr);
      ierr = PetscArraycpy(edges, &graph[start], numEdges);CHKERRQ(ierr);
    }
    ierr = PetscFree(vOffsets);CHKERRQ(ierr);
    ierr = PetscFree(graph);CHKERRQ(ierr);
    /* Derive CSR graph from section/segbuffer */
    ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
    ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
    ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
    for (idx = 0, p = 0; p < *numVertices; p++) {
      ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
    }
    vOffsets[*numVertices] = size;
    ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
  } else {
    /* Sort adjacencies (not strictly necessary) */
    for (p = 0; p < *numVertices; p++) {
      PetscInt start = vOffsets[p], end = vOffsets[p+1];
      ierr = PetscSortInt(end-start, &graph[start]);CHKERRQ(ierr);
    }
  }

  if (offsets) *offsets = vOffsets;
  if (adjacency) *adjacency = graph;

  /* Cleanup */
  ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
  ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
235 
/* Build the partitioner adjacency graph by assembling a parallel face-by-cell
   connectivity matrix conn and forming conn^T * conn, whose off-diagonal sparsity
   pattern is exactly the cell-cell adjacency through faces. This trades hand-rolled
   communication for a parallel matrix product. */
static PetscErrorCode DMPlexCreatePartitionerGraph_ViaMat(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
{
  Mat            conn, CSR;
  IS             fis, cis, cis_own;
  PetscSF        sfPoint;
  const PetscInt *rows, *cols, *ii, *jj;
  PetscInt       *idxs,*idxs2;
  PetscInt       dim, depth, floc, cloc, i, M, N, c, m, cStart, cEnd, fStart, fEnd;
  PetscMPIInt    rank;
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  if (dim != depth) {
    /* We do not handle the uninterpolated case here */
    ierr = DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);CHKERRQ(ierr);
    /* DMPlexCreateNeighborCSR does not make a numbering */
    if (globalNumbering) {ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);CHKERRQ(ierr);}
    /* Different behavior for empty graphs */
    if (!*numVertices) {
      ierr = PetscMalloc1(1, offsets);CHKERRQ(ierr);
      (*offsets)[0] = 0;
    }
    /* Broken in parallel */
    if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported");
    PetscFunctionReturn(0);
  }
  /* Interpolated and parallel case */

  /* numbering */
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &cStart, &cEnd);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height+1, &fStart, &fEnd);CHKERRQ(ierr);
  ierr = DMPlexCreateNumbering_Internal(dm, cStart, cEnd, 0, &N, sfPoint, &cis);CHKERRQ(ierr);
  ierr = DMPlexCreateNumbering_Internal(dm, fStart, fEnd, 0, &M, sfPoint, &fis);CHKERRQ(ierr);
  if (globalNumbering) {
    ierr = ISDuplicate(cis, globalNumbering);CHKERRQ(ierr);
  }

  /* get positive global ids and local sizes for facets and cells */
  ierr = ISGetLocalSize(fis, &m);CHKERRQ(ierr);
  ierr = ISGetIndices(fis, &rows);CHKERRQ(ierr);
  ierr = PetscMalloc1(m, &idxs);CHKERRQ(ierr);
  for (i = 0, floc = 0; i < m; i++) {
    const PetscInt p = rows[i];

    if (p < 0) {
      /* Negative entries encode remotely owned facets as -(id+1): decode, do not count locally */
      idxs[i] = -(p+1);
    } else {
      idxs[i] = p;
      floc   += 1;
    }
  }
  ierr = ISRestoreIndices(fis, &rows);CHKERRQ(ierr);
  ierr = ISDestroy(&fis);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF, m, idxs, PETSC_OWN_POINTER, &fis);CHKERRQ(ierr);

  ierr = ISGetLocalSize(cis, &m);CHKERRQ(ierr);
  ierr = ISGetIndices(cis, &cols);CHKERRQ(ierr);
  ierr = PetscMalloc1(m, &idxs);CHKERRQ(ierr);
  ierr = PetscMalloc1(m, &idxs2);CHKERRQ(ierr);
  for (i = 0, cloc = 0; i < m; i++) {
    const PetscInt p = cols[i];

    if (p < 0) {
      idxs[i] = -(p+1);
    } else {
      /* idxs2 collects global ids of the cells owned by this process only */
      idxs[i]       = p;
      idxs2[cloc++] = p;
    }
  }
  ierr = ISRestoreIndices(cis, &cols);CHKERRQ(ierr);
  ierr = ISDestroy(&cis);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), m, idxs, PETSC_OWN_POINTER, &cis);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), cloc, idxs2, PETSC_OWN_POINTER, &cis_own);CHKERRQ(ierr);

  /* Create matrix to hold F-C connectivity (MatMatTranspose Mult not supported for MPIAIJ) */
  ierr = MatCreate(PetscObjectComm((PetscObject)dm), &conn);CHKERRQ(ierr);
  ierr = MatSetSizes(conn, floc, cloc, M, N);CHKERRQ(ierr);
  ierr = MatSetType(conn, MATMPIAIJ);CHKERRQ(ierr);
  ierr = DMPlexGetMaxSizes(dm, NULL, &m);CHKERRQ(ierr);
  /* Preallocate with the maximum support size for both diagonal and off-diagonal parts */
  ierr = MatMPIAIJSetPreallocation(conn, m, NULL, m, NULL);CHKERRQ(ierr);

  /* Assemble matrix */
  ierr = ISGetIndices(fis, &rows);CHKERRQ(ierr);
  ierr = ISGetIndices(cis, &cols);CHKERRQ(ierr);
  for (c = cStart; c < cEnd; c++) {
    const PetscInt *cone;
    PetscInt        coneSize, row, col, f;

    col  = cols[c-cStart];
    ierr = DMPlexGetCone(dm, c, &cone);CHKERRQ(ierr);
    ierr = DMPlexGetConeSize(dm, c, &coneSize);CHKERRQ(ierr);
    /* Insert a unit entry at (face, cell) for every face of the cell */
    for (f = 0; f < coneSize; f++) {
      const PetscScalar v = 1.0;
      const PetscInt *children;
      PetscInt        numChildren, ch;

      row  = rows[cone[f]-fStart];
      ierr = MatSetValues(conn, 1, &row, 1, &col, &v, INSERT_VALUES);CHKERRQ(ierr);

      /* non-conforming meshes */
      ierr = DMPlexGetTreeChildren(dm, cone[f], &numChildren, &children);CHKERRQ(ierr);
      for (ch = 0; ch < numChildren; ch++) {
        const PetscInt child = children[ch];

        if (child < fStart || child >= fEnd) continue;
        row  = rows[child-fStart];
        ierr = MatSetValues(conn, 1, &row, 1, &col, &v, INSERT_VALUES);CHKERRQ(ierr);
      }
    }
  }
  ierr = ISRestoreIndices(fis, &rows);CHKERRQ(ierr);
  ierr = ISRestoreIndices(cis, &cols);CHKERRQ(ierr);
  ierr = ISDestroy(&fis);CHKERRQ(ierr);
  ierr = ISDestroy(&cis);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(conn, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(conn, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  /* Get parallel CSR by doing conn^T * conn */
  ierr = MatTransposeMatMult(conn, conn, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &CSR);CHKERRQ(ierr);
  ierr = MatDestroy(&conn);CHKERRQ(ierr);

  /* extract local part of the CSR */
  ierr = MatMPIAIJGetLocalMat(CSR, MAT_INITIAL_MATRIX, &conn);CHKERRQ(ierr);
  ierr = MatDestroy(&CSR);CHKERRQ(ierr);
  ierr = MatGetRowIJ(conn, 0, PETSC_FALSE, PETSC_FALSE, &m, &ii, &jj, &flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "No IJ format");

  /* get back requested output */
  if (numVertices) *numVertices = m;
  if (offsets) {
    ierr = PetscCalloc1(m+1, &idxs);CHKERRQ(ierr);
    /* Each row of conn^T*conn contains a diagonal (self) entry; subtract it from the offsets */
    for (i = 1; i < m+1; i++) idxs[i] = ii[i] - i; /* ParMetis does not like self-connectivity */
    *offsets = idxs;
  }
  if (adjacency) {
    ierr = PetscMalloc1(ii[m] - m, &idxs);CHKERRQ(ierr);
    ierr = ISGetIndices(cis_own, &rows);CHKERRQ(ierr);
    for (i = 0, c = 0; i < m; i++) {
      PetscInt j, g = rows[i];

      for (j = ii[i]; j < ii[i+1]; j++) {
        if (jj[j] == g) continue; /* again, self-connectivity */
        idxs[c++] = jj[j];
      }
    }
    /* Sanity check: exactly one diagonal entry should have been dropped per row */
    if (c != ii[m] - m) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected %D != %D",c,ii[m]-m);
    ierr = ISRestoreIndices(cis_own, &rows);CHKERRQ(ierr);
    *adjacency = idxs;
  }

  /* cleanup */
  ierr = ISDestroy(&cis_own);CHKERRQ(ierr);
  ierr = MatRestoreRowIJ(conn, 0, PETSC_FALSE, PETSC_FALSE, &m, &ii, &jj, &flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "No IJ format");
  ierr = MatDestroy(&conn);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
398 
/*@C
  DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner

  Input Parameters:
+ dm      - The mesh DM dm
- height  - Height of the strata from which to construct the graph

  Output Parameters:
+ numVertices     - Number of vertices in the graph
. offsets         - Point offsets in the graph
. adjacency       - Point connectivity in the graph
- globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.

  Options Database Key:
. -dm_plex_csr_via_mat - Compute the graph with a parallel matrix product (conn^T * conn) instead of the native point-SF algorithm

  Notes:
  The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
  representation on the mesh. If requested, globalNumbering needs to be destroyed by the caller; offsets and adjacency need to be freed with PetscFree().

  Level: developer

.seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMSetAdjacency()
@*/
PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
{
  PetscErrorCode ierr;
  PetscBool      usemat = PETSC_FALSE;

  PetscFunctionBegin;
  /* Dispatch to the Mat-based or native implementation depending on the option */
  ierr = PetscOptionsGetBool(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-dm_plex_csr_via_mat", &usemat, NULL);CHKERRQ(ierr);
  if (usemat) {
    ierr = DMPlexCreatePartitionerGraph_ViaMat(dm, height, numVertices, offsets, adjacency, globalNumbering);CHKERRQ(ierr);
  } else {
    ierr = DMPlexCreatePartitionerGraph_Native(dm, height, numVertices, offsets, adjacency, globalNumbering);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
433 
434 /*@C
435   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
436 
437   Collective on DM
438 
439   Input Arguments:
440 + dm - The DMPlex
441 - cellHeight - The height of mesh points to treat as cells (default should be 0)
442 
443   Output Arguments:
444 + numVertices - The number of local vertices in the graph, or cells in the mesh.
445 . offsets     - The offset to the adjacency list for each cell
446 - adjacency   - The adjacency list for all cells
447 
448   Note: This is suitable for input to a mesh partitioner like ParMetis.
449 
450   Level: advanced
451 
452 .seealso: DMPlexCreate()
453 @*/
454 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
455 {
456   const PetscInt maxFaceCases = 30;
457   PetscInt       numFaceCases = 0;
458   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
459   PetscInt      *off, *adj;
460   PetscInt      *neighborCells = NULL;
461   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
462   PetscErrorCode ierr;
463 
464   PetscFunctionBegin;
465   /* For parallel partitioning, I think you have to communicate supports */
466   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
467   cellDim = dim - cellHeight;
468   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
469   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
470   if (cEnd - cStart == 0) {
471     if (numVertices) *numVertices = 0;
472     if (offsets)   *offsets   = NULL;
473     if (adjacency) *adjacency = NULL;
474     PetscFunctionReturn(0);
475   }
476   numCells  = cEnd - cStart;
477   faceDepth = depth - cellHeight;
478   if (dim == depth) {
479     PetscInt f, fStart, fEnd;
480 
481     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
482     /* Count neighboring cells */
483     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
484     for (f = fStart; f < fEnd; ++f) {
485       const PetscInt *support;
486       PetscInt        supportSize;
487       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
488       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
489       if (supportSize == 2) {
490         PetscInt numChildren;
491 
492         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
493         if (!numChildren) {
494           ++off[support[0]-cStart+1];
495           ++off[support[1]-cStart+1];
496         }
497       }
498     }
499     /* Prefix sum */
500     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
501     if (adjacency) {
502       PetscInt *tmp;
503 
504       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
505       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
506       ierr = PetscArraycpy(tmp, off, numCells+1);CHKERRQ(ierr);
507       /* Get neighboring cells */
508       for (f = fStart; f < fEnd; ++f) {
509         const PetscInt *support;
510         PetscInt        supportSize;
511         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
512         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
513         if (supportSize == 2) {
514           PetscInt numChildren;
515 
516           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
517           if (!numChildren) {
518             adj[tmp[support[0]-cStart]++] = support[1];
519             adj[tmp[support[1]-cStart]++] = support[0];
520           }
521         }
522       }
523 #if defined(PETSC_USE_DEBUG)
524       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
525 #endif
526       ierr = PetscFree(tmp);CHKERRQ(ierr);
527     }
528     if (numVertices) *numVertices = numCells;
529     if (offsets)   *offsets   = off;
530     if (adjacency) *adjacency = adj;
531     PetscFunctionReturn(0);
532   }
533   /* Setup face recognition */
534   if (faceDepth == 1) {
535     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
536 
537     for (c = cStart; c < cEnd; ++c) {
538       PetscInt corners;
539 
540       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
541       if (!cornersSeen[corners]) {
542         PetscInt nFV;
543 
544         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
545         cornersSeen[corners] = 1;
546 
547         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
548 
549         numFaceVertices[numFaceCases++] = nFV;
550       }
551     }
552   }
553   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
554   /* Count neighboring cells */
555   for (cell = cStart; cell < cEnd; ++cell) {
556     PetscInt numNeighbors = PETSC_DETERMINE, n;
557 
558     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
559     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
560     for (n = 0; n < numNeighbors; ++n) {
561       PetscInt        cellPair[2];
562       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
563       PetscInt        meetSize = 0;
564       const PetscInt *meet    = NULL;
565 
566       cellPair[0] = cell; cellPair[1] = neighborCells[n];
567       if (cellPair[0] == cellPair[1]) continue;
568       if (!found) {
569         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
570         if (meetSize) {
571           PetscInt f;
572 
573           for (f = 0; f < numFaceCases; ++f) {
574             if (numFaceVertices[f] == meetSize) {
575               found = PETSC_TRUE;
576               break;
577             }
578           }
579         }
580         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
581       }
582       if (found) ++off[cell-cStart+1];
583     }
584   }
585   /* Prefix sum */
586   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
587 
588   if (adjacency) {
589     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
590     /* Get neighboring cells */
591     for (cell = cStart; cell < cEnd; ++cell) {
592       PetscInt numNeighbors = PETSC_DETERMINE, n;
593       PetscInt cellOffset   = 0;
594 
595       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
596       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
597       for (n = 0; n < numNeighbors; ++n) {
598         PetscInt        cellPair[2];
599         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
600         PetscInt        meetSize = 0;
601         const PetscInt *meet    = NULL;
602 
603         cellPair[0] = cell; cellPair[1] = neighborCells[n];
604         if (cellPair[0] == cellPair[1]) continue;
605         if (!found) {
606           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
607           if (meetSize) {
608             PetscInt f;
609 
610             for (f = 0; f < numFaceCases; ++f) {
611               if (numFaceVertices[f] == meetSize) {
612                 found = PETSC_TRUE;
613                 break;
614               }
615             }
616           }
617           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
618         }
619         if (found) {
620           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
621           ++cellOffset;
622         }
623       }
624     }
625   }
626   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
627   if (numVertices) *numVertices = numCells;
628   if (offsets)   *offsets   = off;
629   if (adjacency) *adjacency = adj;
630   PetscFunctionReturn(0);
631 }
632 
/*@C
  PetscPartitionerRegister - Adds a new PetscPartitioner implementation

  Not Collective

  Input Parameters:
+ name        - The name of a new user-defined creation routine
- create_func - The creation routine itself

  Notes:
  PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners

  Sample usage:
.vb
    PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
.ve

  Then, your PetscPartitioner type can be chosen with the procedural interface via
.vb
    PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
    PetscPartitionerSetType(PetscPartitioner, "my_part");
.ve
   or at runtime via the option
.vb
    -petscpartitioner_type my_part
.ve

  Level: advanced

.seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()

@*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Add the constructor to the global list consulted by PetscPartitionerSetType() */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
673 
/*@C
  PetscPartitionerSetType - Builds a particular PetscPartitioner

  Collective on PetscPartitioner

  Input Parameters:
+ part - The PetscPartitioner object
- name - The kind of partitioner

  Options Database Key:
. -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types

  Level: intermediate

.seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
{
  PetscErrorCode (*r)(PetscPartitioner);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Nothing to do if the object is already of the requested type */
  ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  /* Make sure all built-in implementations are registered, then find the constructor */
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);

  /* Tear down the current implementation and reset its function table before building the new one */
  if (part->ops->destroy) {
    ierr = (*part->ops->destroy)(part);CHKERRQ(ierr);
  }
  part->noGraph = PETSC_FALSE;
  ierr = PetscMemzero(part->ops, sizeof(*part->ops));CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
  ierr = (*r)(part);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
714 
715 /*@C
716   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
717 
718   Not Collective
719 
720   Input Parameter:
721 . part - The PetscPartitioner
722 
723   Output Parameter:
724 . name - The PetscPartitioner type name
725 
726   Level: intermediate
727 
728 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
729 @*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  /* The returned string is owned by the object; the caller must not free it */
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
741 
742 /*@C
743    PetscPartitionerViewFromOptions - View from Options
744 
745    Collective on PetscPartitioner
746 
747    Input Parameters:
748 +  A - the PetscPartitioner object
749 .  obj - Optional object
750 -  name - command line option
751 
752    Level: intermediate
753 .seealso:  PetscPartitionerView(), PetscObjectViewFromOptions()
754 @*/
PetscErrorCode PetscPartitionerViewFromOptions(PetscPartitioner A,PetscObject obj,const char name[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,PETSCPARTITIONER_CLASSID,1);
  /* Delegate to the generic PetscObject handling of -xxx_view options */
  ierr = PetscObjectViewFromOptions((PetscObject)A,obj,name);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
764 
765 /*@C
766   PetscPartitionerView - Views a PetscPartitioner
767 
768   Collective on PetscPartitioner
769 
  Input Parameters:
771 + part - the PetscPartitioner object to view
772 - v    - the viewer
773 
774   Level: developer
775 
776 .seealso: PetscPartitionerDestroy()
777 @*/
PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
{
  PetscMPIInt    size;
  PetscBool      isascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Default to the communicator's stdout viewer when none is supplied */
  if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
  ierr = PetscObjectTypeCompare((PetscObject) v, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
  if (isascii) {
    /* Common ASCII header: process count, type name, and edge-cut/balance statistics */
    ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v, "Graph Partitioner: %d MPI Process%s\n", size, size > 1 ? "es" : "");CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v, "  type: %s\n", part->hdr.type_name);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPushTab(v);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v, "edge cut: %D\n", part->edgeCut);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v, "balance:  %.2g\n", part->balance);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPopTab(v);CHKERRQ(ierr);
  }
  /* Let the implementation append type-specific output */
  if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
800 
801 static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType)
802 {
803   PetscFunctionBegin;
804   if (!currentType) {
805 #if defined(PETSC_HAVE_PARMETIS)
806     *defaultType = PETSCPARTITIONERPARMETIS;
807 #elif defined(PETSC_HAVE_PTSCOTCH)
808     *defaultType = PETSCPARTITIONERPTSCOTCH;
809 #elif defined(PETSC_HAVE_CHACO)
810     *defaultType = PETSCPARTITIONERCHACO;
811 #else
812     *defaultType = PETSCPARTITIONERSIMPLE;
813 #endif
814   } else {
815     *defaultType = currentType;
816   }
817   PetscFunctionReturn(0);
818 }
819 
820 /*@
821   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
822 
823   Collective on PetscPartitioner
824 
825   Input Parameter:
826 . part - the PetscPartitioner object to set options for
827 
828   Level: developer
829 
830 .seealso: PetscPartitionerView()
831 @*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  const char    *defaultType;
  char           name[256];
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  ierr = PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);CHKERRQ(ierr);
  /* PetscObjectOptionsBegin declares the PetscOptionsObject used below */
  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, sizeof(name), &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
  } else if (!((PetscObject) part)->type_name) {
    /* No type requested and none set yet: use the build-dependent default */
    ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
  }
  /* Type-specific options */
  if (part->ops->setfromoptions) {
    ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);
  }
  /* Replace any previously configured graph viewer with the one requested now */
  ierr = PetscViewerDestroy(&part->viewerGraph);CHKERRQ(ierr);
  ierr = PetscOptionsGetViewer(((PetscObject) part)->comm, ((PetscObject) part)->options, ((PetscObject) part)->prefix, "-petscpartitioner_view_graph", &part->viewerGraph, &part->formatGraph, &part->viewGraph);CHKERRQ(ierr);
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
860 
861 /*@C
862   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
863 
864   Collective on PetscPartitioner
865 
866   Input Parameter:
867 . part - the PetscPartitioner object to setup
868 
869   Level: developer
870 
871 .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
872 @*/
PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Setup is optional for implementations; only dispatch when provided */
  if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
882 
883 /*@
884   PetscPartitionerDestroy - Destroys a PetscPartitioner object
885 
886   Collective on PetscPartitioner
887 
888   Input Parameter:
889 . part - the PetscPartitioner object to destroy
890 
891   Level: developer
892 
893 .seealso: PetscPartitionerView()
894 @*/
895 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
896 {
897   PetscErrorCode ierr;
898 
899   PetscFunctionBegin;
900   if (!*part) PetscFunctionReturn(0);
901   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
902 
903   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
904   ((PetscObject) (*part))->refct = 0;
905 
906   ierr = PetscViewerDestroy(&(*part)->viewerGraph);CHKERRQ(ierr);
907   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
908   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
909   PetscFunctionReturn(0);
910 }
911 
912 /*@
913   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
914 
915   Collective
916 
917   Input Parameter:
918 . comm - The communicator for the PetscPartitioner object
919 
920   Output Parameter:
921 . part - The PetscPartitioner object
922 
923   Level: beginner
924 
925 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
926 @*/
PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
{
  PetscPartitioner p;
  const char       *partitionerType = NULL;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidPointer(part, 2);
  *part = NULL;
  /* Ensure the DM package (class ids, logging) is initialized */
  ierr = DMInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
  /* Give the new object the build-dependent default type immediately */
  ierr = PetscPartitionerGetDefaultType(NULL,&partitionerType);CHKERRQ(ierr);
  ierr = PetscPartitionerSetType(p,partitionerType);CHKERRQ(ierr);

  p->edgeCut = 0;
  p->balance = 0.0;

  *part = p;
  PetscFunctionReturn(0);
}
948 
949 /*@
950   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
951 
952   Collective on PetscPartitioner
953 
954   Input Parameters:
955 + part    - The PetscPartitioner
956 - dm      - The mesh DM
957 
958   Output Parameters:
959 + partSection     - The PetscSection giving the division of points by partition
960 - partition       - The list of points by partition
961 
962   Options Database:
963 . -petscpartitioner_view_graph - View the graph we are partitioning
964 
965   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
966 
967   Level: developer
968 
.seealso: DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
970 @*/
971 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
972 {
973   PetscMPIInt    size;
974   PetscErrorCode ierr;
975 
976   PetscFunctionBegin;
977   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
978   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
979   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
980   PetscValidPointer(partition, 5);
981   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
982   if (size == 1) {
983     PetscInt *points;
984     PetscInt  cStart, cEnd, c;
985 
986     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
987     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
988     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
989     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
990     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
991     for (c = cStart; c < cEnd; ++c) points[c] = c;
992     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
993     PetscFunctionReturn(0);
994   }
995   if (part->height == 0) {
996     PetscInt numVertices = 0;
997     PetscInt *start     = NULL;
998     PetscInt *adjacency = NULL;
999     IS       globalNumbering;
1000 
1001     if (!part->noGraph || part->viewGraph) {
1002       ierr = DMPlexCreatePartitionerGraph(dm, part->height, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
1003     } else { /* only compute the number of owned local vertices */
1004       const PetscInt *idxs;
1005       PetscInt       p, pStart, pEnd;
1006 
1007       ierr = DMPlexGetHeightStratum(dm, part->height, &pStart, &pEnd);CHKERRQ(ierr);
1008       ierr = DMPlexCreateNumbering_Internal(dm, pStart, pEnd, 0, NULL, dm->sf, &globalNumbering);CHKERRQ(ierr);
1009       ierr = ISGetIndices(globalNumbering, &idxs);CHKERRQ(ierr);
1010       for (p = 0; p < pEnd - pStart; p++) numVertices += idxs[p] < 0 ? 0 : 1;
1011       ierr = ISRestoreIndices(globalNumbering, &idxs);CHKERRQ(ierr);
1012     }
1013     if (part->viewGraph) {
1014       PetscViewer viewer = part->viewerGraph;
1015       PetscBool   isascii;
1016       PetscInt    v, i;
1017       PetscMPIInt rank;
1018 
1019       ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) viewer), &rank);CHKERRQ(ierr);
1020       ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
1021       if (isascii) {
1022         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
1023         ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]Nv: %D\n", rank, numVertices);CHKERRQ(ierr);
1024         for (v = 0; v < numVertices; ++v) {
1025           const PetscInt s = start[v];
1026           const PetscInt e = start[v+1];
1027 
1028           ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]  ", rank);CHKERRQ(ierr);
1029           for (i = s; i < e; ++i) {ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%D ", adjacency[i]);CHKERRQ(ierr);}
1030           ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%D-%D)\n", s, e);CHKERRQ(ierr);
1031         }
1032         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1033         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
1034       }
1035     }
1036     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no partitioning method");
1037     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
1038     ierr = PetscFree(start);CHKERRQ(ierr);
1039     ierr = PetscFree(adjacency);CHKERRQ(ierr);
1040     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
1041       const PetscInt *globalNum;
1042       const PetscInt *partIdx;
1043       PetscInt       *map, cStart, cEnd;
1044       PetscInt       *adjusted, i, localSize, offset;
1045       IS             newPartition;
1046 
1047       ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
1048       ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
1049       ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
1050       ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
1051       ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
1052       ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
1053       for (i = cStart, offset = 0; i < cEnd; i++) {
1054         if (globalNum[i - cStart] >= 0) map[offset++] = i;
1055       }
1056       for (i = 0; i < localSize; i++) {
1057         adjusted[i] = map[partIdx[i]];
1058       }
1059       ierr = PetscFree(map);CHKERRQ(ierr);
1060       ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
1061       ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
1062       ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
1063       ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
1064       ierr = ISDestroy(partition);CHKERRQ(ierr);
1065       *partition = newPartition;
1066     }
1067   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
1068   ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
1069   PetscFunctionReturn(0);
1070 }
1071 
/* Free the Shell partitioner's private data: the stored section and partition IS */
static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1083 
1084 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
1085 {
1086   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1087   PetscErrorCode          ierr;
1088 
1089   PetscFunctionBegin;
1090   if (p->random) {
1091     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1092     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
1093     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1094   }
1095   PetscFunctionReturn(0);
1096 }
1097 
/* View dispatcher for the Shell type: only an ASCII format is implemented */
static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
  if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
1110 
/* Shell options: -petscpartitioner_shell_random toggles random partitioning */
static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner Shell Options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1122 
1123 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1124 {
1125   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1126   PetscInt                np;
1127   PetscErrorCode          ierr;
1128 
1129   PetscFunctionBegin;
1130   if (p->random) {
1131     PetscRandom r;
1132     PetscInt   *sizes, *points, v, p;
1133     PetscMPIInt rank;
1134 
1135     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1136     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
1137     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr);
1138     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
1139     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
1140     for (v = 0; v < numVertices; ++v) {points[v] = v;}
1141     for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);}
1142     for (v = numVertices-1; v > 0; --v) {
1143       PetscReal val;
1144       PetscInt  w, tmp;
1145 
1146       ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));CHKERRQ(ierr);
1147       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
1148       w    = PetscFloorReal(val);
1149       tmp       = points[v];
1150       points[v] = points[w];
1151       points[w] = tmp;
1152     }
1153     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
1154     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
1155     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
1156   }
1157   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
1158   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
1159   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
1160   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
1161   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
1162   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
1163   *partition = p->partition;
1164   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
1165   PetscFunctionReturn(0);
1166 }
1167 
/* Install the Shell implementation's function table */
static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->noGraph             = PETSC_TRUE; /* PetscPartitionerShell cannot overload the partition call, so it is safe for now */
  part->ops->view           = PetscPartitionerView_Shell;
  part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
  part->ops->destroy        = PetscPartitionerDestroy_Shell;
  part->ops->partition      = PetscPartitionerPartition_Shell;
  PetscFunctionReturn(0);
}
1178 
1179 /*MC
1180   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
1181 
1182   Level: intermediate
1183 
1184 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1185 M*/
1186 
PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
{
  PetscPartitioner_Shell *p;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Allocate the type-specific data and attach it to the object */
  ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
  part->data = p;

  ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
  p->random = PETSC_FALSE; /* default: use the user-provided partition, not a random one */
  PetscFunctionReturn(0);
}
1201 
1202 /*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
1204 
1205   Collective on PetscPartitioner
1206 
1207   Input Parameters:
1208 + part   - The PetscPartitioner
1209 . size   - The number of partitions
1210 . sizes  - array of size size (or NULL) providing the number of points in each partition
1211 - points - array of size sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)
1212 
1213   Level: developer
1214 
1215   Notes:
1216 
1217     It is safe to free the sizes and points arrays after use in this routine.
1218 
1219 .seealso DMPlexDistribute(), PetscPartitionerCreate()
1220 @*/
1221 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
1222 {
1223   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1224   PetscInt                proc, numPoints;
1225   PetscErrorCode          ierr;
1226 
1227   PetscFunctionBegin;
1228   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1229   if (sizes)  {PetscValidPointer(sizes, 3);}
1230   if (points) {PetscValidPointer(points, 4);}
1231   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
1232   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
1233   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
1234   ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
1235   if (sizes) {
1236     for (proc = 0; proc < size; ++proc) {
1237       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
1238     }
1239   }
1240   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
1241   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
1242   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
1243   PetscFunctionReturn(0);
1244 }
1245 
1246 /*@
1247   PetscPartitionerShellSetRandom - Set the flag to use a random partition
1248 
1249   Collective on PetscPartitioner
1250 
1251   Input Parameters:
1252 + part   - The PetscPartitioner
1253 - random - The flag to use a random partition
1254 
1255   Level: intermediate
1256 
1257 .seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
1258 @*/
1259 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
1260 {
1261   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1262 
1263   PetscFunctionBegin;
1264   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1265   p->random = random;
1266   PetscFunctionReturn(0);
1267 }
1268 
1269 /*@
1270   PetscPartitionerShellGetRandom - get the flag to use a random partition
1271 
1272   Collective on PetscPartitioner
1273 
1274   Input Parameter:
1275 . part   - The PetscPartitioner
1276 
  Output Parameter:
1278 . random - The flag to use a random partition
1279 
1280   Level: intermediate
1281 
1282 .seealso PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
1283 @*/
1284 PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
1285 {
1286   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1287 
1288   PetscFunctionBegin;
1289   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1290   PetscValidPointer(random, 2);
1291   *random = p->random;
1292   PetscFunctionReturn(0);
1293 }
1294 
/* Free the Simple partitioner's private data (it holds no resources beyond the struct itself) */
static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
{
  PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1304 
/* The Simple partitioner has no type-specific state to display */
static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1310 
/* View dispatcher for the Simple type: only an ASCII format is implemented */
static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
  if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
1323 
/* Simple partitioner: split the global vertex numbering into nparts contiguous,
   near-equal chunks, keeping each rank's local vertices in their original order. */
static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)part);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  /* Local vertices keep their order: the partition IS is the identity */
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial: split evenly; the first numVertices % nparts parts get one extra vertex */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  } else {
    /* Parallel: find the global index range [myFirst, myLast] of this rank's vertices,
       then record how many of them fall in each global part */
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    /* Prefix sum: offsets[r] = global index of rank r's first vertex */
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      /* The first rem parts are "big" (pBig vertices each); the rest are "small" (pSmall) */
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      PetscInt rem = nvGlobal % nparts;
      PetscInt pSmall = nvGlobal/nparts;
      PetscInt pBig = nvGlobal/nparts + 1;


      if (rem) {
        /* Locate the parts containing my first and last global vertices */
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        } else {
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        } else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      } else {
        /* All parts have the same size when nparts divides nvGlobal */
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      /* For every part overlapping my range, record the overlap size */
      for (np = firstPart; np <= lastPart; np++) {
        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1393 
/* Install the Simple implementation's function table */
static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->noGraph        = PETSC_TRUE; /* Simple partitions by counts only; it never reads the adjacency graph */
  part->ops->view      = PetscPartitionerView_Simple;
  part->ops->destroy   = PetscPartitionerDestroy_Simple;
  part->ops->partition = PetscPartitionerPartition_Simple;
  PetscFunctionReturn(0);
}
1403 
1404 /*MC
1405   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1406 
1407   Level: intermediate
1408 
1409 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1410 M*/
1411 
PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
{
  PetscPartitioner_Simple *p;
  PetscErrorCode           ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Allocate the type-specific data and attach it to the object */
  ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
  part->data = p;

  ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1425 
/* Free the Gather partitioner's private data (it holds no resources beyond the struct itself) */
static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
{
  PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1435 
/* The Gather partitioner has no type-specific state to display */
static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1441 
/* View dispatcher for the Gather type: only an ASCII format is implemented */
static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
  if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
1454 
1455 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1456 {
1457   PetscInt       np;
1458   PetscErrorCode ierr;
1459 
1460   PetscFunctionBegin;
1461   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1462   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1463   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1464   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1465   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1466   PetscFunctionReturn(0);
1467 }
1468 
/* Install the Gather implementation's function table */
static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->noGraph        = PETSC_TRUE; /* Gather assigns by counts only; it never reads the adjacency graph */
  part->ops->view      = PetscPartitionerView_Gather;
  part->ops->destroy   = PetscPartitionerDestroy_Gather;
  part->ops->partition = PetscPartitionerPartition_Gather;
  PetscFunctionReturn(0);
}
1478 
1479 /*MC
1480   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1481 
1482   Level: intermediate
1483 
1484 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1485 M*/
1486 
PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
{
  PetscPartitioner_Gather *p;
  PetscErrorCode           ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Allocate the type-specific data and attach it to the object */
  ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
  part->data = p;

  ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1500 
1501 
1502 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1503 {
1504   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1505   PetscErrorCode          ierr;
1506 
1507   PetscFunctionBegin;
1508   ierr = PetscFree(p);CHKERRQ(ierr);
1509   PetscFunctionReturn(0);
1510 }
1511 
/* The Chaco partitioner exposes no tunable state, so there is nothing to print */
static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1517 
1518 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1519 {
1520   PetscBool      iascii;
1521   PetscErrorCode ierr;
1522 
1523   PetscFunctionBegin;
1524   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1525   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1526   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1527   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1528   PetscFunctionReturn(0);
1529 }
1530 
1531 #if defined(PETSC_HAVE_CHACO)
1532 #if defined(PETSC_HAVE_UNISTD_H)
1533 #include <unistd.h>
1534 #endif
1535 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1536 #include <chaco.h>
1537 #else
1538 /* Older versions of Chaco do not have an include file */
1539 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1540                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1541                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1542                        int mesh_dims[3], double *goal, int global_method, int local_method,
1543                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1544 #endif
1545 extern int FREE_GRAPH;
1546 #endif
1547 
/* Partition a graph with the (serial) Chaco library.

   The CSR graph (start/adjacency) must reside entirely on one process; in debug
   builds the code errors out if more than one rank holds vertices. Results are
   returned as partSection (dofs per partition) plus the partition IS, which lists
   vertex numbers grouped by partition. Chaco's stdout is captured through a pipe
   so that its messages can be attached to any error report. */
static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int           *assignment;              /* Output partition */
#else
  short int     *assignment;              /* Output partition */
#endif
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
#if defined (PETSC_USE_DEBUG)
  {
    int ival,isum;
    PetscBool distributed;

    /* Chaco is serial: more than one rank with vertices means the graph is distributed */
    ival = (numVertices > 0);
    ierr = MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);CHKERRQ(ierr);
    distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
    if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
  }
#endif
  /* Ranks holding no vertices return an empty section and IS immediately */
  if (!numVertices) {
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Shift adjacency entries to 1-based numbering for Chaco; undone at the end */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  /* ierr receives Chaco's own return code here, checked after stdout is restored */
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    /* Drain the captured stdout and restore the original file descriptor */
    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#else
  if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* Group vertex numbers by partition, as expected by the caller */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Restore the caller's 0-based adjacency numbering */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1668 
1669 static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1670 {
1671   PetscFunctionBegin;
1672   part->noGraph        = PETSC_FALSE;
1673   part->ops->view      = PetscPartitionerView_Chaco;
1674   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1675   part->ops->partition = PetscPartitionerPartition_Chaco;
1676   PetscFunctionReturn(0);
1677 }
1678 
1679 /*MC
1680   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1681 
1682   Level: intermediate
1683 
1684 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1685 M*/
1686 
1687 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1688 {
1689   PetscPartitioner_Chaco *p;
1690   PetscErrorCode          ierr;
1691 
1692   PetscFunctionBegin;
1693   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1694   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1695   part->data = p;
1696 
1697   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1698   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1699   PetscFunctionReturn(0);
1700 }
1701 
/* ParMetis partitioning scheme names; index 0 = k-way, index 1 = recursive bisection (see metis_ptype use below) */
static const char *ptypes[] = {"kway", "rb"};
1703 
1704 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1705 {
1706   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1707   PetscErrorCode             ierr;
1708 
1709   PetscFunctionBegin;
1710   ierr = PetscFree(p);CHKERRQ(ierr);
1711   PetscFunctionReturn(0);
1712 }
1713 
1714 static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1715 {
1716   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1717   PetscErrorCode             ierr;
1718 
1719   PetscFunctionBegin;
1720   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1721   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis type: %s\n", ptypes[p->ptype]);CHKERRQ(ierr);
1722   ierr = PetscViewerASCIIPrintf(viewer, "load imbalance ratio %g\n", (double) p->imbalanceRatio);CHKERRQ(ierr);
1723   ierr = PetscViewerASCIIPrintf(viewer, "debug flag %D\n", p->debugFlag);CHKERRQ(ierr);
1724   ierr = PetscViewerASCIIPrintf(viewer, "random seed %D\n", p->randomSeed);CHKERRQ(ierr);
1725   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1726   PetscFunctionReturn(0);
1727 }
1728 
1729 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1730 {
1731   PetscBool      iascii;
1732   PetscErrorCode ierr;
1733 
1734   PetscFunctionBegin;
1735   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1736   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1737   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1738   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1739   PetscFunctionReturn(0);
1740 }
1741 
/* Process command-line options for the ParMetis partitioner:
   partitioning scheme, load imbalance ratio, debug flag, and random seed. */
static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
  PetscErrorCode            ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");CHKERRQ(ierr);
  /* Choices come from ptypes[] = {"kway", "rb"}; the chosen index is stored in p->ptype */
  ierr = PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-petscpartitioner_parmetis_imbalance_ratio", "Load imbalance ratio limit", "", p->imbalanceRatio, &p->imbalanceRatio, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-petscpartitioner_parmetis_debug", "Debugging flag", "", p->debugFlag, &p->debugFlag, NULL);CHKERRQ(ierr);
  /* -1 selects ParMETIS's built-in default seed (see PetscPartitionerCreate_ParMetis) */
  ierr = PetscOptionsInt("-petscpartitioner_parmetis_seed", "Random seed", "", p->randomSeed, &p->randomSeed, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1756 
1757 #if defined(PETSC_HAVE_PARMETIS)
1758 #include <parmetis.h>
1759 #endif
1760 
1761 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1762 {
1763 #if defined(PETSC_HAVE_PARMETIS)
1764   PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *) part->data;
1765   MPI_Comm       comm;
1766   PetscSection   section;
1767   PetscInt       nvtxs       = numVertices; /* The number of vertices in full graph */
1768   PetscInt      *vtxdist;                   /* Distribution of vertices across processes */
1769   PetscInt      *xadj        = start;       /* Start of edge list for each vertex */
1770   PetscInt      *adjncy      = adjacency;   /* Edge lists for all vertices */
1771   PetscInt      *vwgt        = NULL;        /* Vertex weights */
1772   PetscInt      *adjwgt      = NULL;        /* Edge weights */
1773   PetscInt       wgtflag     = 0;           /* Indicates which weights are present */
1774   PetscInt       numflag     = 0;           /* Indicates initial offset (0 or 1) */
1775   PetscInt       ncon        = 1;           /* The number of weights per vertex */
1776   PetscInt       metis_ptype = pm->ptype;   /* kway or recursive bisection */
1777   real_t        *tpwgts;                    /* The fraction of vertex weights assigned to each partition */
1778   real_t        *ubvec;                     /* The balance intolerance for vertex weights */
1779   PetscInt       options[64];               /* Options */
1780   /* Outputs */
1781   PetscInt       v, i, *assignment, *points;
1782   PetscMPIInt    size, rank, p;
1783   PetscErrorCode ierr;
1784 
1785   PetscFunctionBegin;
1786   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1787   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1788   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1789   /* Calculate vertex distribution */
1790   ierr = PetscMalloc5(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
1791   vtxdist[0] = 0;
1792   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1793   for (p = 2; p <= size; ++p) {
1794     vtxdist[p] += vtxdist[p-1];
1795   }
1796   /* Calculate partition weights */
1797   for (p = 0; p < nparts; ++p) {
1798     tpwgts[p] = 1.0/nparts;
1799   }
1800   ubvec[0] = pm->imbalanceRatio;
1801   /* Weight cells by dofs on cell by default */
1802   ierr = DMGetLocalSection(dm, &section);CHKERRQ(ierr);
1803   for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1804   if (section) {
1805     PetscInt cStart, cEnd, dof;
1806 
1807     /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1808     /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1809     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
1810     for (v = cStart; v < cStart + numVertices; ++v) {
1811       ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1812       vwgt[v-cStart] = PetscMax(dof, 1);
1813     }
1814   }
1815   wgtflag |= 2; /* have weights on graph vertices */
1816 
1817   if (nparts == 1) {
1818     ierr = PetscArrayzero(assignment, nvtxs);CHKERRQ(ierr);
1819   } else {
1820     for (p = 0; !vtxdist[p+1] && p < size; ++p);
1821     if (vtxdist[p+1] == vtxdist[size]) {
1822       if (rank == p) {
1823         ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
1824         options[METIS_OPTION_DBGLVL] = pm->debugFlag;
1825         options[METIS_OPTION_SEED]   = pm->randomSeed;
1826         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
1827         if (metis_ptype == 1) {
1828           PetscStackPush("METIS_PartGraphRecursive");
1829           ierr = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1830           PetscStackPop;
1831           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
1832         } else {
1833           /*
1834            It would be nice to activate the two options below, but they would need some actual testing.
1835            - Turning on these options may exercise path of the METIS code that have bugs and may break production runs.
1836            - If CONTIG is set to 1, METIS will exit with error if the graph is disconnected, despite the manual saying the option is ignored in such case.
1837           */
1838           /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
1839           /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
1840           PetscStackPush("METIS_PartGraphKway");
1841           ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1842           PetscStackPop;
1843           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1844         }
1845       }
1846     } else {
1847       options[0] = 1; /*use options */
1848       options[1] = pm->debugFlag;
1849       options[2] = (pm->randomSeed == -1) ? 15 : pm->randomSeed; /* default is GLOBAL_SEED=15 from `libparmetis/defs.h` */
1850       PetscStackPush("ParMETIS_V3_PartKway");
1851       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &comm);
1852       PetscStackPop;
1853       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
1854     }
1855   }
1856   /* Convert to PetscSection+IS */
1857   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1858   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1859   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1860   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1861   for (p = 0, i = 0; p < nparts; ++p) {
1862     for (v = 0; v < nvtxs; ++v) {
1863       if (assignment[v] == p) points[i++] = v;
1864     }
1865   }
1866   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1867   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1868   ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
1869   PetscFunctionReturn(0);
1870 #else
1871   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1872 #endif
1873 }
1874 
1875 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1876 {
1877   PetscFunctionBegin;
1878   part->noGraph             = PETSC_FALSE;
1879   part->ops->view           = PetscPartitionerView_ParMetis;
1880   part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
1881   part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
1882   part->ops->partition      = PetscPartitionerPartition_ParMetis;
1883   PetscFunctionReturn(0);
1884 }
1885 
1886 /*MC
1887   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1888 
1889   Level: intermediate
1890 
1891 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1892 M*/
1893 
1894 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1895 {
1896   PetscPartitioner_ParMetis *p;
1897   PetscErrorCode          ierr;
1898 
1899   PetscFunctionBegin;
1900   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1901   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1902   part->data = p;
1903 
1904   p->ptype          = 0;
1905   p->imbalanceRatio = 1.05;
1906   p->debugFlag      = 0;
1907   p->randomSeed     = -1; /* defaults to GLOBAL_SEED=15 from `libparmetis/defs.h` */
1908 
1909   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1910   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1911   PetscFunctionReturn(0);
1912 }
1913 
/* Citation registered (once) when a PT-Scotch partitioner is created; see PetscPartitionerCreate_PTScotch() */
PetscBool PTScotchPartitionercite = PETSC_FALSE;
const char PTScotchPartitionerCitation[] =
  "@article{PTSCOTCH,\n"
  "  author  = {C. Chevalier and F. Pellegrini},\n"
  "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
  "  journal = {Parallel Computing},\n"
  "  volume  = {34},\n"
  "  number  = {6},\n"
  "  pages   = {318--331},\n"
  "  year    = {2008},\n"
  "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
  "}\n";
1926 
1927 #if defined(PETSC_HAVE_PTSCOTCH)
1928 
1929 EXTERN_C_BEGIN
1930 #include <ptscotch.h>
1931 EXTERN_C_END
1932 
1933 #define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0)
1934 
/* Map the partitioner's strategy index (see PTScotchStrategyList and the
   -petscpartitioner_ptscotch_strategy option) onto the corresponding
   SCOTCH_STRAT* flag; unrecognized values fall back to the default strategy. */
static int PTScotch_Strategy(PetscInt strategy)
{
  switch (strategy) {
  case  0: return SCOTCH_STRATDEFAULT;
  case  1: return SCOTCH_STRATQUALITY;
  case  2: return SCOTCH_STRATSPEED;
  case  3: return SCOTCH_STRATBALANCE;
  case  4: return SCOTCH_STRATSAFETY;
  case  5: return SCOTCH_STRATSCALABILITY;
  case  6: return SCOTCH_STRATRECURSIVE;
  case  7: return SCOTCH_STRATREMAP;
  default: return SCOTCH_STRATDEFAULT;
  }
}
1949 
1950 
/* Partition a serial CSR graph with sequential Scotch.

   Builds a SCOTCH_Graph from the CSR arrays, builds a mapping strategy from
   the flag/imbalance pair, and writes the partition assignment into part[].
   Vertex/edge weights are optional (may be NULL). */
static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
                                             SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[])
{
  SCOTCH_Graph   grafdat;
  SCOTCH_Strat   stradat;
  SCOTCH_Num     vertnbr = n;
  SCOTCH_Num     edgenbr = xadj[n];
  SCOTCH_Num*    velotab = vtxwgt;
  SCOTCH_Num*    edlotab = adjwgt;
  SCOTCH_Num     flagval = strategy;
  double         kbalval = imbalance;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  {
    PetscBool flg = PETSC_TRUE;
    /* NOTE(review): option name is misspelled ("partititoner"); kept as-is since
       renaming would change the user-visible option string */
    ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
    if (!flg) velotab = NULL;
  }
  ierr = SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
#if defined(PETSC_USE_DEBUG)
  ierr = SCOTCH_graphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
#endif
  ierr = SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
  SCOTCH_stratExit(&stradat);
  SCOTCH_graphExit(&grafdat);
  PetscFunctionReturn(0);
}
1982 
1983 static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1984                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[], MPI_Comm comm)
1985 {
1986   PetscMPIInt     procglbnbr;
1987   PetscMPIInt     proclocnum;
1988   SCOTCH_Arch     archdat;
1989   SCOTCH_Dgraph   grafdat;
1990   SCOTCH_Dmapping mappdat;
1991   SCOTCH_Strat    stradat;
1992   SCOTCH_Num      vertlocnbr;
1993   SCOTCH_Num      edgelocnbr;
1994   SCOTCH_Num*     veloloctab = vtxwgt;
1995   SCOTCH_Num*     edloloctab = adjwgt;
1996   SCOTCH_Num      flagval = strategy;
1997   double          kbalval = imbalance;
1998   PetscErrorCode  ierr;
1999 
2000   PetscFunctionBegin;
2001   {
2002     PetscBool flg = PETSC_TRUE;
2003     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
2004     if (!flg) veloloctab = NULL;
2005   }
2006   ierr = MPI_Comm_size(comm, &procglbnbr);CHKERRQ(ierr);
2007   ierr = MPI_Comm_rank(comm, &proclocnum);CHKERRQ(ierr);
2008   vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
2009   edgelocnbr = xadj[vertlocnbr];
2010 
2011   ierr = SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr);
2012   ierr = SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr);
2013 #if defined(PETSC_USE_DEBUG)
2014   ierr = SCOTCH_dgraphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
2015 #endif
2016   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
2017   ierr = SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRQ(ierr);
2018   ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
2019   ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
2020   ierr = SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr);
2021 
2022   ierr = SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr);
2023   SCOTCH_dgraphMapExit(&grafdat, &mappdat);
2024   SCOTCH_archExit(&archdat);
2025   SCOTCH_stratExit(&stradat);
2026   SCOTCH_dgraphExit(&grafdat);
2027   PetscFunctionReturn(0);
2028 }
2029 
2030 #endif /* PETSC_HAVE_PTSCOTCH */
2031 
2032 static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
2033 {
2034   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
2035   PetscErrorCode             ierr;
2036 
2037   PetscFunctionBegin;
2038   ierr = PetscFree(p);CHKERRQ(ierr);
2039   PetscFunctionReturn(0);
2040 }
2041 
2042 static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer)
2043 {
2044   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
2045   PetscErrorCode            ierr;
2046 
2047   PetscFunctionBegin;
2048   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
2049   ierr = PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);CHKERRQ(ierr);
2050   ierr = PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n",(double)p->imbalance);CHKERRQ(ierr);
2051   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
2052   PetscFunctionReturn(0);
2053 }
2054 
2055 static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
2056 {
2057   PetscBool      iascii;
2058   PetscErrorCode ierr;
2059 
2060   PetscFunctionBegin;
2061   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
2062   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
2063   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
2064   if (iascii) {ierr = PetscPartitionerView_PTScotch_Ascii(part, viewer);CHKERRQ(ierr);}
2065   PetscFunctionReturn(0);
2066 }
2067 
/* Process command-line options for the PTScotch partitioner:
   partitioning strategy (index into PTScotchStrategyList) and imbalance ratio. */
static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
  const char *const         *slist = PTScotchStrategyList;
  /* number of available strategy names, derived from the list's array length */
  PetscInt                  nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0]));
  PetscBool                 flag;
  PetscErrorCode            ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");CHKERRQ(ierr);
  ierr = PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2083 
2084 static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
2085 {
2086 #if defined(PETSC_HAVE_PTSCOTCH)
2087   MPI_Comm       comm       = PetscObjectComm((PetscObject)part);
2088   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
2089   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
2090   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
2091   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
2092   PetscInt      *vwgt       = NULL;        /* Vertex weights */
2093   PetscInt      *adjwgt     = NULL;        /* Edge weights */
2094   PetscInt       v, i, *assignment, *points;
2095   PetscMPIInt    size, rank, p;
2096   PetscErrorCode ierr;
2097 
2098   PetscFunctionBegin;
2099   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2100   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2101   ierr = PetscMalloc2(size+1,&vtxdist,PetscMax(nvtxs,1),&assignment);CHKERRQ(ierr);
2102 
2103   /* Calculate vertex distribution */
2104   vtxdist[0] = 0;
2105   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
2106   for (p = 2; p <= size; ++p) {
2107     vtxdist[p] += vtxdist[p-1];
2108   }
2109 
2110   if (nparts == 1) {
2111     ierr = PetscArrayzero(assignment, nvtxs);CHKERRQ(ierr);
2112   } else { /* Weight cells by dofs on cell by default */
2113     PetscSection section;
2114 
2115     /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
2116     /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
2117     ierr = PetscMalloc1(PetscMax(nvtxs,1),&vwgt);CHKERRQ(ierr);
2118     for (v = 0; v < PetscMax(nvtxs,1); ++v) vwgt[v] = 1;
2119     ierr = DMGetLocalSection(dm, &section);CHKERRQ(ierr);
2120     if (section) {
2121       PetscInt vStart, vEnd, dof;
2122       ierr = DMPlexGetHeightStratum(dm, part->height, &vStart, &vEnd);CHKERRQ(ierr);
2123       for (v = vStart; v < vStart + numVertices; ++v) {
2124         ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
2125         vwgt[v-vStart] = PetscMax(dof, 1);
2126       }
2127     }
2128     {
2129       PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
2130       int                       strat = PTScotch_Strategy(pts->strategy);
2131       double                    imbal = (double)pts->imbalance;
2132 
2133       for (p = 0; !vtxdist[p+1] && p < size; ++p);
2134       if (vtxdist[p+1] == vtxdist[size]) {
2135         if (rank == p) {
2136           ierr = PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, assignment);CHKERRQ(ierr);
2137         }
2138       } else {
2139         ierr = PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, assignment, comm);CHKERRQ(ierr);
2140       }
2141     }
2142     ierr = PetscFree(vwgt);CHKERRQ(ierr);
2143   }
2144 
2145   /* Convert to PetscSection+IS */
2146   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
2147   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
2148   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
2149   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
2150   for (p = 0, i = 0; p < nparts; ++p) {
2151     for (v = 0; v < nvtxs; ++v) {
2152       if (assignment[v] == p) points[i++] = v;
2153     }
2154   }
2155   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
2156   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
2157 
2158   ierr = PetscFree2(vtxdist,assignment);CHKERRQ(ierr);
2159   PetscFunctionReturn(0);
2160 #else
2161   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
2162 #endif
2163 }
2164 
2165 static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
2166 {
2167   PetscFunctionBegin;
2168   part->noGraph             = PETSC_FALSE;
2169   part->ops->view           = PetscPartitionerView_PTScotch;
2170   part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
2171   part->ops->partition      = PetscPartitionerPartition_PTScotch;
2172   part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
2173   PetscFunctionReturn(0);
2174 }
2175 
2176 /*MC
2177   PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library
2178 
2179   Level: intermediate
2180 
2181 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
2182 M*/
2183 
2184 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
2185 {
2186   PetscPartitioner_PTScotch *p;
2187   PetscErrorCode          ierr;
2188 
2189   PetscFunctionBegin;
2190   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
2191   ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
2192   part->data = p;
2193 
2194   p->strategy  = 0;
2195   p->imbalance = 0.01;
2196 
2197   ierr = PetscPartitionerInitialize_PTScotch(part);CHKERRQ(ierr);
2198   ierr = PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);CHKERRQ(ierr);
2199   PetscFunctionReturn(0);
2200 }
2201 
2202 
2203 /*@
2204   DMPlexGetPartitioner - Get the mesh partitioner
2205 
2206   Not collective
2207 
2208   Input Parameter:
2209 . dm - The DM
2210 
2211   Output Parameter:
2212 . part - The PetscPartitioner
2213 
2214   Level: developer
2215 
2216   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
2217 
.seealso: DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
2219 @*/
2220 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
2221 {
2222   DM_Plex       *mesh = (DM_Plex *) dm->data;
2223 
2224   PetscFunctionBegin;
2225   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2226   PetscValidPointer(part, 2);
2227   *part = mesh->partitioner;
2228   PetscFunctionReturn(0);
2229 }
2230 
2231 /*@
2232   DMPlexSetPartitioner - Set the mesh partitioner
2233 
  Logically Collective on DM
2235 
2236   Input Parameters:
2237 + dm - The DM
2238 - part - The partitioner
2239 
2240   Level: developer
2241 
2242   Note: Any existing PetscPartitioner will be destroyed.
2243 
.seealso: DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
2245 @*/
2246 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
2247 {
2248   DM_Plex       *mesh = (DM_Plex *) dm->data;
2249   PetscErrorCode ierr;
2250 
2251   PetscFunctionBegin;
2252   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2253   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
2254   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
2255   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
2256   mesh->partitioner = part;
2257   PetscFunctionReturn(0);
2258 }
2259 
2260 static PetscErrorCode DMPlexAddClosure_Private(DM dm, PetscHSetI ht, PetscInt point)
2261 {
2262   const PetscInt *cone;
2263   PetscInt       coneSize, c;
2264   PetscBool      missing;
2265   PetscErrorCode ierr;
2266 
2267   PetscFunctionBeginHot;
2268   ierr = PetscHSetIQueryAdd(ht, point, &missing);CHKERRQ(ierr);
2269   if (missing) {
2270     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
2271     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
2272     for (c = 0; c < coneSize; c++) {
2273       ierr = DMPlexAddClosure_Private(dm, ht, cone[c]);CHKERRQ(ierr);
2274     }
2275   }
2276   PetscFunctionReturn(0);
2277 }
2278 
/* Add to ht the tree closure of point: with 'up', the transitive closure of its tree
   parent (recursing further upward from every closure point); with 'down', all tree
   children (recursing further downward). Marked PETSC_UNUSED: it is referenced only
   from the disabled (#else) branch of DMPlexClosurePoints_Private. */
PETSC_UNUSED static PetscErrorCode DMPlexAddClosure_Tree(DM dm, PetscHSetI ht, PetscInt point, PetscBool up, PetscBool down)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (up) {
    PetscInt parent;

    ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
    /* parent == point means this point has no tree parent */
    if (parent != point) {
      PetscInt closureSize, *closure = NULL, i;

      /* Add the whole closure of the parent, walking further up the tree from each point */
      ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
      for (i = 0; i < closureSize; i++) {
        PetscInt cpoint = closure[2*i];  /* closure[] interleaves (point, orientation) pairs */

        ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
        ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
      }
      ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
    }
  }
  if (down) {
    PetscInt numChildren;
    const PetscInt *children;

    ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
    if (numChildren) {
      PetscInt i;

      /* Add each child and continue downward from it */
      for (i = 0; i < numChildren; i++) {
        PetscInt cpoint = children[i];

        ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
        ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}
2319 
/* Walk up the reference-tree ancestry of point, adding each ancestor and its cone
   closure to ht. The point itself is not added here (the caller does that). */
static PetscErrorCode DMPlexAddClosureTree_Up_Private(DM dm, PetscHSetI ht, PetscInt point)
{
  PetscInt       parent;
  PetscErrorCode ierr;

  PetscFunctionBeginHot;
  ierr = DMPlexGetTreeParent(dm, point, &parent,NULL);CHKERRQ(ierr);
  /* parent == point signals the top of the tree: nothing more to add */
  if (point != parent) {
    const PetscInt *cone;
    PetscInt       coneSize, c;

    /* Recurse to more distant ancestors first, then add the parent and its closure */
    ierr = DMPlexAddClosureTree_Up_Private(dm, ht, parent);CHKERRQ(ierr);
    ierr = DMPlexAddClosure_Private(dm, ht, parent);CHKERRQ(ierr);
    ierr = DMPlexGetCone(dm, parent, &cone);CHKERRQ(ierr);
    ierr = DMPlexGetConeSize(dm, parent, &coneSize);CHKERRQ(ierr);
    for (c = 0; c < coneSize; c++) {
      const PetscInt cp = cone[c];

      /* Cone points of the parent may themselves have tree parents: walk their ancestry too */
      ierr = DMPlexAddClosureTree_Up_Private(dm, ht, cp);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
2343 
2344 static PetscErrorCode DMPlexAddClosureTree_Down_Private(DM dm, PetscHSetI ht, PetscInt point)
2345 {
2346   PetscInt       i, numChildren;
2347   const PetscInt *children;
2348   PetscErrorCode ierr;
2349 
2350   PetscFunctionBeginHot;
2351   ierr = DMPlexGetTreeChildren(dm, point, &numChildren, &children);CHKERRQ(ierr);
2352   for (i = 0; i < numChildren; i++) {
2353     ierr = PetscHSetIAdd(ht, children[i]);CHKERRQ(ierr);
2354   }
2355   PetscFunctionReturn(0);
2356 }
2357 
2358 static PetscErrorCode DMPlexAddClosureTree_Private(DM dm, PetscHSetI ht, PetscInt point)
2359 {
2360   const PetscInt *cone;
2361   PetscInt       coneSize, c;
2362   PetscErrorCode ierr;
2363 
2364   PetscFunctionBeginHot;
2365   ierr = PetscHSetIAdd(ht, point);CHKERRQ(ierr);
2366   ierr = DMPlexAddClosureTree_Up_Private(dm, ht, point);CHKERRQ(ierr);
2367   ierr = DMPlexAddClosureTree_Down_Private(dm, ht, point);CHKERRQ(ierr);
2368   ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
2369   ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
2370   for (c = 0; c < coneSize; c++) {
2371     ierr = DMPlexAddClosureTree_Private(dm, ht, cone[c]);CHKERRQ(ierr);
2372   }
2373   PetscFunctionReturn(0);
2374 }
2375 
/* Compute the union of the transitive closures of the given points and return it as a
   sorted IS on PETSC_COMM_SELF. When the mesh carries a reference tree (a parent or
   child section is present), tree ancestors and children are included as well. */
PetscErrorCode DMPlexClosurePoints_Private(DM dm, PetscInt numPoints, const PetscInt points[], IS *closureIS)
{
  DM_Plex         *mesh = (DM_Plex *)dm->data;
  const PetscBool hasTree = (mesh->parentSection || mesh->childSection) ? PETSC_TRUE : PETSC_FALSE;
  PetscInt        nelems, *elems, off = 0, p;
  PetscHSetI      ht;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
  /* Heuristic preallocation: roughly 16 closure points per input point */
  ierr = PetscHSetIResize(ht, numPoints*16);CHKERRQ(ierr);
  if (!hasTree) {
    for (p = 0; p < numPoints; ++p) {
      ierr = DMPlexAddClosure_Private(dm, ht, points[p]);CHKERRQ(ierr);
    }
  } else {
#if 1
    for (p = 0; p < numPoints; ++p) {
      ierr = DMPlexAddClosureTree_Private(dm, ht, points[p]);CHKERRQ(ierr);
    }
#else
    /* Disabled alternative: closure via DMPlexGetTransitiveClosure + DMPlexAddClosure_Tree */
    PetscInt  *closure = NULL, closureSize, c;
    for (p = 0; p < numPoints; ++p) {
      ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      for (c = 0; c < closureSize*2; c += 2) {
        ierr = PetscHSetIAdd(ht, closure[c]);CHKERRQ(ierr);
        if (hasTree) {ierr = DMPlexAddClosure_Tree(dm, ht, closure[c], PETSC_TRUE, PETSC_TRUE);CHKERRQ(ierr);}
      }
    }
    if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure);CHKERRQ(ierr);}
#endif
  }
  /* Flatten the set into a sorted array; the IS takes ownership of elems */
  ierr = PetscHSetIGetSize(ht, &nelems);CHKERRQ(ierr);
  ierr = PetscMalloc1(nelems, &elems);CHKERRQ(ierr);
  ierr = PetscHSetIGetElems(ht, &off, elems);CHKERRQ(ierr);
  ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
  ierr = PetscSortInt(nelems, elems);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF, nelems, elems, PETSC_OWN_POINTER, closureIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2416 
2417 /*@
2418   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
2419 
2420   Input Parameters:
2421 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2423 
2424   Level: developer
2425 
2426 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2427 @*/
2428 PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
2429 {
2430   IS              rankIS,   pointIS, closureIS;
2431   const PetscInt *ranks,   *points;
2432   PetscInt        numRanks, numPoints, r;
2433   PetscErrorCode  ierr;
2434 
2435   PetscFunctionBegin;
2436   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2437   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2438   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2439   for (r = 0; r < numRanks; ++r) {
2440     const PetscInt rank = ranks[r];
2441     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2442     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2443     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2444     ierr = DMPlexClosurePoints_Private(dm, numPoints, points, &closureIS);CHKERRQ(ierr);
2445     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2446     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2447     ierr = DMLabelSetStratumIS(label, rank, closureIS);CHKERRQ(ierr);
2448     ierr = ISDestroy(&closureIS);CHKERRQ(ierr);
2449   }
2450   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2451   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2452   PetscFunctionReturn(0);
2453 }
2454 
2455 /*@
2456   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
2457 
2458   Input Parameters:
2459 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2461 
2462   Level: developer
2463 
2464 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2465 @*/
2466 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
2467 {
2468   IS              rankIS,   pointIS;
2469   const PetscInt *ranks,   *points;
2470   PetscInt        numRanks, numPoints, r, p, a, adjSize;
2471   PetscInt       *adj = NULL;
2472   PetscErrorCode  ierr;
2473 
2474   PetscFunctionBegin;
2475   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2476   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2477   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2478   for (r = 0; r < numRanks; ++r) {
2479     const PetscInt rank = ranks[r];
2480 
2481     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2482     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2483     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2484     for (p = 0; p < numPoints; ++p) {
2485       adjSize = PETSC_DETERMINE;
2486       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
2487       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
2488     }
2489     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2490     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2491   }
2492   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2493   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2494   ierr = PetscFree(adj);CHKERRQ(ierr);
2495   PetscFunctionReturn(0);
2496 }
2497 
2498 /*@
2499   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
2500 
2501   Input Parameters:
2502 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2504 
2505   Level: developer
2506 
2507   Note: This is required when generating multi-level overlaps to capture
2508   overlap points from non-neighbouring partitions.
2509 
2510 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2511 @*/
2512 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
2513 {
2514   MPI_Comm        comm;
2515   PetscMPIInt     rank;
2516   PetscSF         sfPoint;
2517   DMLabel         lblRoots, lblLeaves;
2518   IS              rankIS, pointIS;
2519   const PetscInt *ranks;
2520   PetscInt        numRanks, r;
2521   PetscErrorCode  ierr;
2522 
2523   PetscFunctionBegin;
2524   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2525   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2526   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
2527   /* Pull point contributions from remote leaves into local roots */
2528   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
2529   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
2530   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2531   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2532   for (r = 0; r < numRanks; ++r) {
2533     const PetscInt remoteRank = ranks[r];
2534     if (remoteRank == rank) continue;
2535     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
2536     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2537     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2538   }
2539   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2540   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2541   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
2542   /* Push point contributions from roots into remote leaves */
2543   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
2544   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
2545   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2546   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2547   for (r = 0; r < numRanks; ++r) {
2548     const PetscInt remoteRank = ranks[r];
2549     if (remoteRank == rank) continue;
2550     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
2551     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2552     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2553   }
2554   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2555   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2556   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
2557   PetscFunctionReturn(0);
2558 }
2559 
2560 /*@
2561   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
2562 
2563   Input Parameters:
2564 + dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
2566 - processSF - A star forest mapping into the local index on each remote rank
2567 
2568   Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots
2570 
2571   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
2572   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
2573 
2574   Level: developer
2575 
2576 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2577 @*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size, r;
  PetscInt           p, n, numNeighbors, numPoints, dof, off, rootSize, l, nleaves, leafSize;
  PetscSF            sfPoint;
  PetscSection       rootSection;
  PetscSFNode       *rootPoints, *leafPoints;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscBool          mpiOverflow = PETSC_FALSE;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_PartLabelInvert,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners */
  /* rootSection is chartered over ranks [0, size): dof(r) = number of points destined for rank r */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &rootSize);CHKERRQ(ierr);
  ierr = PetscMalloc1(rootSize, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  /* Fill rootPoints: for each labeled point, record its true owner (remote if it is an SF leaf) */
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* PetscFindInt requires local to be sorted; l >= 0 means the point is a leaf owned elsewhere */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }

  /* Try to communicate overlap using All-to-All */
  /* Exactly one of the two communication paths below allocates leafPoints and sets leafSize */
  if (!processSF) {
    PetscInt64  counter = 0;
    PetscBool   locOverflow = PETSC_FALSE;
    PetscMPIInt *scounts, *sdispls, *rcounts, *rdispls;

    ierr = PetscCalloc4(size, &scounts, size, &sdispls, size, &rcounts, size, &rdispls);CHKERRQ(ierr);
    for (n = 0; n < numNeighbors; ++n) {
      ierr = PetscSectionGetDof(rootSection, neighbors[n], &dof);CHKERRQ(ierr);
      ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
#if defined(PETSC_USE_64BIT_INDICES)
      /* Counts/displacements must fit in MPI int arguments; fall back to SF path otherwise */
      if (dof > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
      if (off > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
#endif
      scounts[neighbors[n]] = (PetscMPIInt) dof;
      sdispls[neighbors[n]] = (PetscMPIInt) off;
    }
    ierr = MPI_Alltoall(scounts, 1, MPI_INT, rcounts, 1, MPI_INT, comm);CHKERRQ(ierr);
    for (r = 0; r < size; ++r) { rdispls[r] = (int)counter; counter += rcounts[r]; }
    if (counter > PETSC_MPI_INT_MAX) locOverflow = PETSC_TRUE;
    /* The overflow decision must be collective, since Alltoallv is collective */
    ierr = MPI_Allreduce(&locOverflow, &mpiOverflow, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
    if (!mpiOverflow) {
      ierr = PetscInfo(dm,"Using Alltoallv for mesh distribution\n");CHKERRQ(ierr);
      leafSize = (PetscInt) counter;
      ierr = PetscMalloc1(leafSize, &leafPoints);CHKERRQ(ierr);
      ierr = MPI_Alltoallv(rootPoints, scounts, sdispls, MPIU_2INT, leafPoints, rcounts, rdispls, MPIU_2INT, comm);CHKERRQ(ierr);
    }
    ierr = PetscFree4(scounts, sdispls, rcounts, rdispls);CHKERRQ(ierr);
  }

  /* Communicate overlap using process star forest */
  if (processSF || mpiOverflow) {
    PetscSF      procSF;
    PetscSection leafSection;

    if (processSF) {
      ierr = PetscInfo(dm,"Using processSF for mesh distribution\n");CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)processSF);CHKERRQ(ierr);
      procSF = processSF;
    } else {
      /* Alltoallv counts would overflow int: build an all-to-all SF instead */
      ierr = PetscInfo(dm,"Using processSF for mesh distribution (MPI overflow)\n");CHKERRQ(ierr);
      ierr = PetscSFCreate(comm,&procSF);CHKERRQ(ierr);
      ierr = PetscSFSetGraphWithPattern(procSF,NULL,PETSCSF_PATTERN_ALLTOALL);CHKERRQ(ierr);
    }

    ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dm), &leafSection);CHKERRQ(ierr);
    /* DMPlexDistributeData allocates leafPoints for us */
    ierr = DMPlexDistributeData(dm, procSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
    ierr = PetscSectionGetStorageSize(leafSection, &leafSize);CHKERRQ(ierr);
    ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&procSF);CHKERRQ(ierr);
  }

  /* Record the received (point, owner-rank) pairs in the output label */
  for (p = 0; p < leafSize; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }

  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_PartLabelInvert,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2695 
2696 /*@
2697   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
2698 
2699   Input Parameters:
2700 + dm    - The DM
- label - DMLabel assigning ranks to remote roots
2702 
2703   Output Parameter:
2704 . sf    - The star forest communication context encapsulating the defined mapping
2705 
2706   Note: The incoming label is a receiver mapping of remote points to their parent rank.
2707 
2708   Level: developer
2709 
2710 .seealso: DMPlexDistribute(), DMPlexCreateOverlap()
2711 @*/
PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
{
  PetscMPIInt     rank;
  PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0, nNeighbors;
  PetscSFNode    *remotePoints;
  IS              remoteRootIS, neighborsIS;
  const PetscInt *remoteRoots, *neighbors;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_PartLabelCreateSF,dm,0,0,0);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);

  ierr = DMLabelGetValueIS(label, &neighborsIS);CHKERRQ(ierr);
#if 0
  /* Disabled: optionally sort the neighbor ranks before building the SF */
  {
    IS is;
    ierr = ISDuplicate(neighborsIS, &is);CHKERRQ(ierr);
    ierr = ISSort(is);CHKERRQ(ierr);
    ierr = ISDestroy(&neighborsIS);CHKERRQ(ierr);
    neighborsIS = is;
  }
#endif
  ierr = ISGetLocalSize(neighborsIS, &nNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(neighborsIS, &neighbors);CHKERRQ(ierr);
  /* Total leaf count = sum of stratum sizes over every rank present in the label
     (this includes the local rank's stratum, which is filled first below) */
  for (numRemote = 0, n = 0; n < nNeighbors; ++n) {
    ierr = DMLabelGetStratumSize(label, neighbors[n], &numPoints);CHKERRQ(ierr);
    numRemote += numPoints;
  }
  ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
  /* Put owned points first */
  ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
  if (numPoints > 0) {
    ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
    ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    for (p = 0; p < numPoints; p++) {
      remotePoints[idx].index = remoteRoots[p];
      remotePoints[idx].rank = rank;
      idx++;
    }
    ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
  }
  /* Now add remote points */
  for (n = 0; n < nNeighbors; ++n) {
    const PetscInt nn = neighbors[n];

    ierr = DMLabelGetStratumSize(label, nn, &numPoints);CHKERRQ(ierr);
    if (nn == rank || numPoints <= 0) continue;  /* the local stratum was handled above */
    ierr = DMLabelGetStratumIS(label, nn, &remoteRootIS);CHKERRQ(ierr);
    ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    for (p = 0; p < numPoints; p++) {
      remotePoints[idx].index = remoteRoots[p];
      remotePoints[idx].rank = nn;
      idx++;
    }
    ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
  }
  ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  /* NULL ilocal: leaves are the contiguous range [0, numRemote); SF owns remotePoints */
  ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFSetUp(*sf);CHKERRQ(ierr);
  ierr = ISDestroy(&neighborsIS);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_PartLabelCreateSF,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2779 
2780 /* The two functions below are used by DMPlexRebalanceSharedPoints which errors
2781  * when PETSc is built without ParMETIS. To avoid -Wunused-function, we take
2782  * them out in that case. */
2783 #if defined(PETSC_HAVE_PARMETIS)
2784 /*@C
2785 
2786   DMPlexRewriteSF - Rewrites the ownership of the SF of a DM (in place).
2787 
2788   Input parameters:
2789 + dm                - The DMPlex object.
2790 . n                 - The number of points.
2791 . pointsToRewrite   - The points in the SF whose ownership will change.
2792 . targetOwners      - New owner for each element in pointsToRewrite.
2793 - degrees           - Degrees of the points in the SF as obtained by PetscSFComputeDegreeBegin/PetscSFComputeDegreeEnd.
2794 
2795   Level: developer
2796 
2797 @*/
static PetscErrorCode DMPlexRewriteSF(DM dm, PetscInt n, PetscInt *pointsToRewrite, PetscInt *targetOwners, const PetscInt *degrees)
{
  /* NOTE(review): ierr is declared PetscInt here; the codebase convention is PetscErrorCode */
  PetscInt      ierr, pStart, pEnd, i, j, counter, leafCounter, sumDegrees, nroots, nleafs;
  PetscInt     *cumSumDegrees, *newOwners, *newNumbers, *rankOnLeafs, *locationsOfLeafs, *remoteLocalPointOfLeafs, *points, *leafsNew;
  PetscSFNode  *leafLocationsNew;
  const         PetscSFNode *iremote;
  const         PetscInt *ilocal;
  PetscBool    *isLeaf;
  PetscSF       sf;
  MPI_Comm      comm;
  PetscMPIInt   rank, size;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);

  /* Mark which local points are currently leaves of the point SF */
  /* NOTE(review): assumes ilocal != NULL; PetscSFGetGraph may return NULL ilocal for
     contiguous leaf storage — confirm the SFs reaching this code always have explicit ilocal */
  ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sf, &nroots, &nleafs, &ilocal, &iremote); CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &isLeaf);CHKERRQ(ierr);
  for (i=0; i<pEnd-pStart; i++) {
    isLeaf[i] = PETSC_FALSE;
  }
  for (i=0; i<nleafs; i++) {
    isLeaf[ilocal[i]-pStart] = PETSC_TRUE;
  }

  /* cumSumDegrees[p] = offset of point p's leaf records in the gathered arrays below */
  ierr = PetscMalloc1(pEnd-pStart+1, &cumSumDegrees);CHKERRQ(ierr);
  cumSumDegrees[0] = 0;
  for (i=1; i<=pEnd-pStart; i++) {
    cumSumDegrees[i] = cumSumDegrees[i-1] + degrees[i-1];
  }
  sumDegrees = cumSumDegrees[pEnd-pStart];
  /* get the location of my leafs (we have sumDegrees many leafs pointing at our roots) */

  /* Gather, for every root, the rank of each leaf that references it */
  ierr = PetscMalloc1(sumDegrees, &locationsOfLeafs);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &rankOnLeafs);CHKERRQ(ierr);
  for (i=0; i<pEnd-pStart; i++) {
    rankOnLeafs[i] = rank;
  }
  ierr = PetscSFGatherBegin(sf, MPIU_INT, rankOnLeafs, locationsOfLeafs);CHKERRQ(ierr);
  ierr = PetscSFGatherEnd(sf, MPIU_INT, rankOnLeafs, locationsOfLeafs);CHKERRQ(ierr);
  ierr = PetscFree(rankOnLeafs);CHKERRQ(ierr);

  /* get the remote local points of my leaves */
  /* Gather, for every root, the local point number of each referencing leaf on its rank */
  ierr = PetscMalloc1(sumDegrees, &remoteLocalPointOfLeafs);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &points);CHKERRQ(ierr);
  for (i=0; i<pEnd-pStart; i++) {
    points[i] = pStart+i;
  }
  ierr = PetscSFGatherBegin(sf, MPIU_INT, points, remoteLocalPointOfLeafs);CHKERRQ(ierr);
  ierr = PetscSFGatherEnd(sf, MPIU_INT, points, remoteLocalPointOfLeafs);CHKERRQ(ierr);
  ierr = PetscFree(points);CHKERRQ(ierr);
  /* Figure out the new owners of the vertices that are up for grabs and their numbers on the new owners */
  /* newOwners/newNumbers stay -1 for points whose ownership does not change */
  ierr = PetscMalloc1(pEnd-pStart, &newOwners);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &newNumbers);CHKERRQ(ierr);
  for (i=0; i<pEnd-pStart; i++) {
    newOwners[i] = -1;
    newNumbers[i] = -1;
  }
  {
    PetscInt oldNumber, newNumber, oldOwner, newOwner;
    for (i=0; i<n; i++) {
      oldNumber = pointsToRewrite[i];
      newNumber = -1;
      oldOwner = rank;
      newOwner = targetOwners[i];
      if (oldOwner == newOwner) {
        newNumber = oldNumber;
      } else {
        /* The new owner must already hold a leaf for this root; find its local number there */
        for (j=0; j<degrees[oldNumber]; j++) {
          if (locationsOfLeafs[cumSumDegrees[oldNumber]+j] == newOwner) {
            newNumber = remoteLocalPointOfLeafs[cumSumDegrees[oldNumber]+j];
            break;
          }
        }
      }
      if (newNumber == -1) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Couldn't find the new owner of vertex.");

      newOwners[oldNumber] = newOwner;
      newNumbers[oldNumber] = newNumber;
    }
  }
  ierr = PetscFree(cumSumDegrees);CHKERRQ(ierr);
  ierr = PetscFree(locationsOfLeafs);CHKERRQ(ierr);
  ierr = PetscFree(remoteLocalPointOfLeafs);CHKERRQ(ierr);

  /* Broadcast the rewrite decisions from roots to all leaves */
  ierr = PetscSFBcastBegin(sf, MPIU_INT, newOwners, newOwners);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf, MPIU_INT, newOwners, newOwners);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf, MPIU_INT, newNumbers, newNumbers);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf, MPIU_INT, newNumbers, newNumbers);CHKERRQ(ierr);

  /* Now count how many leafs we have on each processor. */
  /* A point is a leaf of the new SF if it was rewritten to another rank,
     or if it was untouched (newOwners < 0) and already a leaf */
  leafCounter=0;
  for (i=0; i<pEnd-pStart; i++) {
    if (newOwners[i] >= 0) {
      if (newOwners[i] != rank) {
        leafCounter++;
      }
    } else {
      if (isLeaf[i]) {
        leafCounter++;
      }
    }
  }

  /* Now set up the new sf by creating the leaf arrays */
  ierr = PetscMalloc1(leafCounter, &leafsNew);CHKERRQ(ierr);
  ierr = PetscMalloc1(leafCounter, &leafLocationsNew);CHKERRQ(ierr);

  leafCounter = 0;
  counter = 0;
  /* NOTE(review): reusing iremote[counter] for untouched leaves assumes ilocal is sorted,
     so that iterating points in order visits old leaves in ilocal order — TODO confirm */
  for (i=0; i<pEnd-pStart; i++) {
    if (newOwners[i] >= 0) {
      if (newOwners[i] != rank) {
        leafsNew[leafCounter] = i;
        leafLocationsNew[leafCounter].rank = newOwners[i];
        leafLocationsNew[leafCounter].index = newNumbers[i];
        leafCounter++;
      }
    } else {
      if (isLeaf[i]) {
        leafsNew[leafCounter] = i;
        leafLocationsNew[leafCounter].rank = iremote[counter].rank;
        leafLocationsNew[leafCounter].index = iremote[counter].index;
        leafCounter++;
      }
    }
    if (isLeaf[i]) {
      counter++;
    }
  }

  /* Install the rewritten graph; the SF takes ownership of both new arrays */
  ierr = PetscSFSetGraph(sf, nroots, leafCounter, leafsNew, PETSC_OWN_POINTER, leafLocationsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscFree(newOwners);CHKERRQ(ierr);
  ierr = PetscFree(newNumbers);CHKERRQ(ierr);
  ierr = PetscFree(isLeaf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2938 
2939 static PetscErrorCode DMPlexViewDistribution(MPI_Comm comm, PetscInt n, PetscInt skip, PetscInt *vtxwgt, PetscInt *part, PetscViewer viewer)
2940 {
2941   PetscInt *distribution, min, max, sum, i, ierr;
2942   PetscMPIInt rank, size;
2943   PetscFunctionBegin;
2944   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2945   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2946   ierr = PetscCalloc1(size, &distribution);CHKERRQ(ierr);
2947   for (i=0; i<n; i++) {
2948     if (part) distribution[part[i]] += vtxwgt[skip*i];
2949     else distribution[rank] += vtxwgt[skip*i];
2950   }
2951   ierr = MPI_Allreduce(MPI_IN_PLACE, distribution, size, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
2952   min = distribution[0];
2953   max = distribution[0];
2954   sum = distribution[0];
2955   for (i=1; i<size; i++) {
2956     if (distribution[i]<min) min=distribution[i];
2957     if (distribution[i]>max) max=distribution[i];
2958     sum += distribution[i];
2959   }
2960   ierr = PetscViewerASCIIPrintf(viewer, "Min: %D, Avg: %D, Max: %D, Balance: %f\n", min, sum/size, max, (max*1.*size)/sum);CHKERRQ(ierr);
2961   ierr = PetscFree(distribution);CHKERRQ(ierr);
2962   PetscFunctionReturn(0);
2963 }
2964 
2965 #endif
2966 
/*@
  DMPlexRebalanceSharedPoints - Redistribute points in the plex that are shared in order to achieve better balancing. This routine updates the PointSF of the DM in place.

  Input Parameters:
+ dm               - The DMPlex object.
. entityDepth      - depth of the entity to balance (0 -> balance vertices).
. useInitialGuess  - whether to use the current distribution as initial guess (only used by ParMETIS).
- parallel         - whether to use ParMETIS and do the partition in parallel or whether to gather the graph onto a single process and use METIS.

  Output Parameters:
. success          - whether the graph partitioning was successful or not. If not, try useInitialGuess=PETSC_TRUE and parallel=PETSC_TRUE.

  Level: intermediate

@*/
2982 
/* Rebalance the shared (interface) points of a distributed DMPlex.
 *
 * Strategy: build a small auxiliary graph in which each rank contributes
 * 1 + numNonExclusivelyOwned vertices. Vertex 0 of each rank lumps together
 * all points that rank owns exclusively (weighted by their count); every
 * further vertex represents one individual shared point. Edges connect each
 * rank's lumped vertex to the shared points that rank sees. Partitioning this
 * graph with METIS/ParMETIS reassigns ownership of the shared points, and the
 * point SF is rewritten accordingly via DMPlexRewriteSF(). */
PetscErrorCode DMPlexRebalanceSharedPoints(DM dm, PetscInt entityDepth, PetscBool useInitialGuess, PetscBool parallel, PetscBool *success)
{
#if defined(PETSC_HAVE_PARMETIS)
  PetscSF     sf;
  PetscInt    ierr, i, j, idx, jdx;  /* NOTE(review): ierr is PetscInt (not PetscErrorCode) so the same variable can hold METIS status codes below */
  PetscInt    eBegin, eEnd, nroots, nleafs, pStart, pEnd;
  const       PetscInt *degrees, *ilocal;
  const       PetscSFNode *iremote;
  PetscBool   *toBalance, *isLeaf, *isExclusivelyOwned, *isNonExclusivelyOwned;
  PetscInt    numExclusivelyOwned, numNonExclusivelyOwned;
  PetscMPIInt rank, size;
  PetscInt    *globalNumbersOfLocalOwnedVertices, *leafGlobalNumbers;
  const       PetscInt *cumSumVertices;  /* layout ranges: global offset of each rank's chunk of graph vertices */
  PetscInt    offset, counter;
  PetscInt    lenadjncy;
  PetscInt    *xadj, *adjncy, *vtxwgt;   /* CSR adjacency + vertex weights handed to (Par)METIS */
  PetscInt    lenxadj;
  PetscInt    *adjwgt = NULL;            /* no edge weights are used */
  PetscInt    *part, *options;
  PetscInt    nparts, wgtflag, numflag, ncon, edgecut;
  real_t      *ubvec;
  PetscInt    *firstVertices, *renumbering;
  PetscInt    failed, failedGlobal;
  MPI_Comm    comm;
  Mat         A, Apre;
  const char *prefix = NULL;
  PetscViewer       viewer;
  PetscViewerFormat format;
  PetscLayout layout;

  PetscFunctionBegin;
  if (success) *success = PETSC_FALSE;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  /* On a single process there is nothing to rebalance. */
  if (size==1) PetscFunctionReturn(0);

  ierr = PetscLogEventBegin(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);

  ierr = PetscOptionsGetViewer(comm,((PetscObject)dm)->options, prefix,"-dm_rebalance_partition_view",&viewer,&format,NULL);CHKERRQ(ierr);
  if (viewer) {
    ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr);
  }

  /* Figure out all points in the plex that we are interested in balancing. */
  ierr = DMPlexGetDepthStratum(dm, entityDepth, &eBegin, &eEnd);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &toBalance);CHKERRQ(ierr);

  /* NOTE(review): local index i corresponds to point i+pStart elsewhere in
   * this function (e.g. pointsToRewrite below), but here it is compared as
   * i-pStart; the two agree only when pStart == 0 -- verify. */
  for (i=0; i<pEnd-pStart; i++) {
    toBalance[i] = (PetscBool)(i-pStart>=eBegin && i-pStart<eEnd);
  }

  /* There are three types of points:
   * exclusivelyOwned: points that are owned by this process and only seen by this process
   * nonExclusivelyOwned: points that are owned by this process but seen by at least another process
   * leaf: a point that is seen by this process but owned by a different process
   */
  ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sf, &nroots, &nleafs, &ilocal, &iremote); CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &isLeaf);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &isNonExclusivelyOwned);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &isExclusivelyOwned);CHKERRQ(ierr);
  for (i=0; i<pEnd-pStart; i++) {
    isNonExclusivelyOwned[i] = PETSC_FALSE;
    isExclusivelyOwned[i] = PETSC_FALSE;
    isLeaf[i] = PETSC_FALSE;
  }

  /* start by marking all the leafs */
  for (i=0; i<nleafs; i++) {
    isLeaf[ilocal[i]-pStart] = PETSC_TRUE;
  }

  /* for an owned point, we can figure out whether another processor sees it or
   * not by calculating its degree */
  ierr = PetscSFComputeDegreeBegin(sf, &degrees);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeEnd(sf, &degrees);CHKERRQ(ierr);

  numExclusivelyOwned = 0;
  numNonExclusivelyOwned = 0;
  for (i=0; i<pEnd-pStart; i++) {
    if (toBalance[i]) {
      if (degrees[i] > 0) {
        /* owned here and seen by at least one other rank */
        isNonExclusivelyOwned[i] = PETSC_TRUE;
        numNonExclusivelyOwned += 1;
      } else {
        if (!isLeaf[i]) {
          /* owned here and seen by nobody else */
          isExclusivelyOwned[i] = PETSC_TRUE;
          numExclusivelyOwned += 1;
        }
      }
    }
  }

  /* We are going to build a graph with one vertex per core representing the
   * exclusively owned points and then one vertex per nonExclusively owned
   * point. */

  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, 1 + numNonExclusivelyOwned);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &cumSumVertices);CHKERRQ(ierr);

  /* Assign a global graph-vertex number to every owned shared point;
   * the sentinel pStart-1 marks points that received no number. */
  ierr = PetscMalloc1(pEnd-pStart, &globalNumbersOfLocalOwnedVertices);CHKERRQ(ierr);
  for (i=0; i<pEnd-pStart; i++) {globalNumbersOfLocalOwnedVertices[i] = pStart - 1;}
  offset = cumSumVertices[rank];
  counter = 0;
  for (i=0; i<pEnd-pStart; i++) {
    if (toBalance[i]) {
      if (degrees[i] > 0) {
        /* the "+ 1" skips this rank's lumped vertex at position offset */
        globalNumbersOfLocalOwnedVertices[i] = counter + 1 + offset;
        counter++;
      }
    }
  }

  /* send the global numbers of vertices I own to the leafs so that they know to connect to it */
  ierr = PetscMalloc1(pEnd-pStart, &leafGlobalNumbers);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf, MPIU_INT, globalNumbersOfLocalOwnedVertices, leafGlobalNumbers);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf, MPIU_INT, globalNumbersOfLocalOwnedVertices, leafGlobalNumbers);CHKERRQ(ierr);

  /* Now start building the data structure for ParMETIS */

  /* First pass: assemble into a MATPREALLOCATOR to learn the nonzero pattern. */
  ierr = MatCreate(comm, &Apre);CHKERRQ(ierr);
  ierr = MatSetType(Apre, MATPREALLOCATOR);CHKERRQ(ierr);
  ierr = MatSetSizes(Apre, 1+numNonExclusivelyOwned, 1+numNonExclusivelyOwned, cumSumVertices[size], cumSumVertices[size]);CHKERRQ(ierr);
  ierr = MatSetUp(Apre);CHKERRQ(ierr);

  for (i=0; i<pEnd-pStart; i++) {
    if (toBalance[i]) {
      idx = cumSumVertices[rank];  /* this rank's lumped vertex */
      if (isNonExclusivelyOwned[i]) jdx = globalNumbersOfLocalOwnedVertices[i];
      else if (isLeaf[i]) jdx = leafGlobalNumbers[i];
      else continue;
      /* symmetric edge between the rank vertex and the shared-point vertex */
      ierr = MatSetValue(Apre, idx, jdx, 1, INSERT_VALUES);CHKERRQ(ierr);
      ierr = MatSetValue(Apre, jdx, idx, 1, INSERT_VALUES);CHKERRQ(ierr);
    }
  }

  ierr = MatAssemblyBegin(Apre, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(Apre, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  /* Second pass: assemble the actual adjacency matrix, exactly preallocated. */
  ierr = MatCreate(comm, &A);CHKERRQ(ierr);
  ierr = MatSetType(A, MATMPIAIJ);CHKERRQ(ierr);
  ierr = MatSetSizes(A, 1+numNonExclusivelyOwned, 1+numNonExclusivelyOwned, cumSumVertices[size], cumSumVertices[size]);CHKERRQ(ierr);
  ierr = MatPreallocatorPreallocate(Apre, PETSC_FALSE, A);CHKERRQ(ierr);
  ierr = MatDestroy(&Apre);CHKERRQ(ierr);

  for (i=0; i<pEnd-pStart; i++) {
    if (toBalance[i]) {
      idx = cumSumVertices[rank];
      if (isNonExclusivelyOwned[i]) jdx = globalNumbersOfLocalOwnedVertices[i];
      else if (isLeaf[i]) jdx = leafGlobalNumbers[i];
      else continue;
      ierr = MatSetValue(A, idx, jdx, 1, INSERT_VALUES);CHKERRQ(ierr);
      ierr = MatSetValue(A, jdx, idx, 1, INSERT_VALUES);CHKERRQ(ierr);
    }
  }

  ierr = MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = PetscFree(leafGlobalNumbers);CHKERRQ(ierr);
  ierr = PetscFree(globalNumbersOfLocalOwnedVertices);CHKERRQ(ierr);

  nparts = size;
  wgtflag = 2;  /* vertex weights only: adjwgt stays NULL */
  numflag = 0;  /* C-style (0-based) numbering */
  ncon = 2;     /* two balance constraints, matching the vtxwgt pairs set below */
  real_t *tpwgts;
  ierr = PetscMalloc1(ncon * nparts, &tpwgts);CHKERRQ(ierr);
  /* equal target weight for every part and constraint */
  for (i=0; i<ncon*nparts; i++) {
    tpwgts[i] = 1./(nparts);
  }

  ierr = PetscMalloc1(ncon, &ubvec);CHKERRQ(ierr);
  ubvec[0] = 1.01;  /* tolerate 1% imbalance per constraint */
  ubvec[1] = 1.01;
  /* Extract this rank's rows of A into CSR form (xadj/adjncy) for (Par)METIS. */
  lenadjncy = 0;
  for (i=0; i<1+numNonExclusivelyOwned; i++) {
    PetscInt temp=0;
    ierr = MatGetRow(A, cumSumVertices[rank] + i, &temp, NULL, NULL);CHKERRQ(ierr);
    lenadjncy += temp;
    ierr = MatRestoreRow(A, cumSumVertices[rank] + i, &temp, NULL, NULL);CHKERRQ(ierr);
  }
  ierr = PetscMalloc1(lenadjncy, &adjncy);CHKERRQ(ierr);
  lenxadj = 2 + numNonExclusivelyOwned;
  ierr = PetscMalloc1(lenxadj, &xadj);CHKERRQ(ierr);
  xadj[0] = 0;
  counter = 0;
  for (i=0; i<1+numNonExclusivelyOwned; i++) {
    PetscInt        temp=0;
    const PetscInt *cols;
    ierr = MatGetRow(A, cumSumVertices[rank] + i, &temp, &cols, NULL);CHKERRQ(ierr);
    ierr = PetscArraycpy(&adjncy[counter], cols, temp);CHKERRQ(ierr);
    counter += temp;
    xadj[i+1] = counter;
    ierr = MatRestoreRow(A, cumSumVertices[rank] + i, &temp, &cols, NULL);CHKERRQ(ierr);
  }

  ierr = PetscMalloc1(cumSumVertices[rank+1]-cumSumVertices[rank], &part);CHKERRQ(ierr);
  ierr = PetscMalloc1(ncon*(1 + numNonExclusivelyOwned), &vtxwgt);CHKERRQ(ierr);
  /* Lumped vertex: weight = number of exclusively owned points (constraint 0)
   * and 1 (constraint 1, counting rank vertices); shared points weigh (1,0). */
  vtxwgt[0] = numExclusivelyOwned;
  if (ncon>1) vtxwgt[1] = 1;
  for (i=0; i<numNonExclusivelyOwned; i++) {
    vtxwgt[ncon*(i+1)] = 1;
    if (ncon>1) vtxwgt[ncon*(i+1)+1] = 0;
  }

  if (viewer) {
    ierr = PetscViewerASCIIPrintf(viewer, "Attempt rebalancing of shared points of depth %D on interface of mesh distribution.\n", entityDepth);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer, "Size of generated auxiliary graph: %D\n", cumSumVertices[size]);CHKERRQ(ierr);
  }
  if (parallel) {
    ierr = PetscMalloc1(4, &options);CHKERRQ(ierr);
    options[0] = 1;
    options[1] = 0; /* Verbosity */
    options[2] = 0; /* Seed */
    options[3] = PARMETIS_PSR_COUPLED; /* Sub-domains and processes are coupled */
    if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using ParMETIS to partition graph.\n");CHKERRQ(ierr); }
    if (useInitialGuess) {
      /* Refine starting from the current distribution instead of partitioning from scratch. */
      if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using current distribution of points as initial guess.\n");CHKERRQ(ierr); }
      PetscStackPush("ParMETIS_V3_RefineKway");
      ierr = ParMETIS_V3_RefineKway((PetscInt*)cumSumVertices, xadj, adjncy, vtxwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgecut, part, &comm);
      if (ierr != METIS_OK) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_LIB, "Error in ParMETIS_V3_RefineKway()");
      PetscStackPop;
    } else {
      PetscStackPush("ParMETIS_V3_PartKway");
      ierr = ParMETIS_V3_PartKway((PetscInt*)cumSumVertices, xadj, adjncy, vtxwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgecut, part, &comm);
      PetscStackPop;
      if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
    }
    ierr = PetscFree(options);CHKERRQ(ierr);
  } else {
    /* Serial path: gather the whole graph on rank 0 and run plain METIS. */
    if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using METIS to partition graph.\n");CHKERRQ(ierr); }
    Mat As;
    PetscInt numRows;
    PetscInt *partGlobal;
    ierr = MatCreateRedundantMatrix(A, size, MPI_COMM_NULL, MAT_INITIAL_MATRIX, &As);CHKERRQ(ierr);

    /* Rank 0 also needs everybody's exclusively-owned counts for the vertex weights. */
    PetscInt *numExclusivelyOwnedAll;
    ierr = PetscMalloc1(size, &numExclusivelyOwnedAll);CHKERRQ(ierr);
    numExclusivelyOwnedAll[rank] = numExclusivelyOwned;
    ierr = MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,numExclusivelyOwnedAll,1,MPIU_INT,comm);CHKERRQ(ierr);

    ierr = MatGetSize(As, &numRows, NULL);CHKERRQ(ierr);
    ierr = PetscMalloc1(numRows, &partGlobal);CHKERRQ(ierr);
    if (!rank) {
      PetscInt *adjncy_g, *xadj_g, *vtxwgt_g;
      lenadjncy = 0;

      /* Build the global CSR arrays from the gathered matrix As. */
      for (i=0; i<numRows; i++) {
        PetscInt temp=0;
        ierr = MatGetRow(As, i, &temp, NULL, NULL);CHKERRQ(ierr);
        lenadjncy += temp;
        ierr = MatRestoreRow(As, i, &temp, NULL, NULL);CHKERRQ(ierr);
      }
      ierr = PetscMalloc1(lenadjncy, &adjncy_g);CHKERRQ(ierr);
      lenxadj = 1 + numRows;
      ierr = PetscMalloc1(lenxadj, &xadj_g);CHKERRQ(ierr);
      xadj_g[0] = 0;
      counter = 0;
      for (i=0; i<numRows; i++) {
        PetscInt        temp=0;
        const PetscInt *cols;
        ierr = MatGetRow(As, i, &temp, &cols, NULL);CHKERRQ(ierr);
        ierr = PetscArraycpy(&adjncy_g[counter], cols, temp);CHKERRQ(ierr);
        counter += temp;
        xadj_g[i+1] = counter;
        ierr = MatRestoreRow(As, i, &temp, &cols, NULL);CHKERRQ(ierr);
      }
      /* NOTE(review): literal 2 here (and in the "2*j+1" index below) stands
       * in for ncon; correct while ncon == 2, but fragile if ncon changes. */
      ierr = PetscMalloc1(2*numRows, &vtxwgt_g);CHKERRQ(ierr);
      for (i=0; i<size; i++){
        /* same weighting scheme as the local vtxwgt above, per original rank */
        vtxwgt_g[ncon*cumSumVertices[i]] = numExclusivelyOwnedAll[i];
        if (ncon>1) vtxwgt_g[ncon*cumSumVertices[i]+1] = 1;
        for (j=cumSumVertices[i]+1; j<cumSumVertices[i+1]; j++) {
          vtxwgt_g[ncon*j] = 1;
          if (ncon>1) vtxwgt_g[2*j+1] = 0;
        }
      }
      ierr = PetscMalloc1(64, &options);CHKERRQ(ierr);
      ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
      if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
      options[METIS_OPTION_CONTIG] = 1;
      PetscStackPush("METIS_PartGraphKway");
      ierr = METIS_PartGraphKway(&numRows, &ncon, xadj_g, adjncy_g, vtxwgt_g, NULL, NULL, &nparts, tpwgts, ubvec, options, &edgecut, partGlobal);
      PetscStackPop;
      if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
      ierr = PetscFree(options);CHKERRQ(ierr);
      ierr = PetscFree(xadj_g);CHKERRQ(ierr);
      ierr = PetscFree(adjncy_g);CHKERRQ(ierr);
      ierr = PetscFree(vtxwgt_g);CHKERRQ(ierr);
    }
    ierr = PetscFree(numExclusivelyOwnedAll);CHKERRQ(ierr);

    /* Now scatter the parts array. */
    {
      PetscMPIInt *counts, *mpiCumSumVertices;
      ierr = PetscMalloc1(size, &counts);CHKERRQ(ierr);
      ierr = PetscMalloc1(size+1, &mpiCumSumVertices);CHKERRQ(ierr);
      for(i=0; i<size; i++) {
        ierr = PetscMPIIntCast(cumSumVertices[i+1] - cumSumVertices[i], &(counts[i]));CHKERRQ(ierr);
      }
      for(i=0; i<=size; i++) {
        ierr = PetscMPIIntCast(cumSumVertices[i], &(mpiCumSumVertices[i]));CHKERRQ(ierr);
      }
      ierr = MPI_Scatterv(partGlobal, counts, mpiCumSumVertices, MPIU_INT, part, counts[rank], MPIU_INT, 0, comm);CHKERRQ(ierr);
      ierr = PetscFree(counts);CHKERRQ(ierr);
      ierr = PetscFree(mpiCumSumVertices);CHKERRQ(ierr);
    }

    ierr = PetscFree(partGlobal);CHKERRQ(ierr);
    ierr = MatDestroy(&As);CHKERRQ(ierr);
  }

  ierr = MatDestroy(&A);CHKERRQ(ierr);
  ierr = PetscFree(ubvec);CHKERRQ(ierr);
  ierr = PetscFree(tpwgts);CHKERRQ(ierr);

  /* Now rename the result so that the vertex resembling the exclusively owned points stays on the same rank */

  ierr = PetscMalloc1(size, &firstVertices);CHKERRQ(ierr);
  ierr = PetscMalloc1(size, &renumbering);CHKERRQ(ierr);
  /* The part of each rank's lumped vertex (part[0]) determines the relabeling. */
  firstVertices[rank] = part[0];
  ierr = MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,firstVertices,1,MPIU_INT,comm);CHKERRQ(ierr);
  for (i=0; i<size; i++) {
    renumbering[firstVertices[i]] = i;
  }
  for (i=0; i<cumSumVertices[rank+1]-cumSumVertices[rank]; i++) {
    part[i] = renumbering[part[i]];
  }
  /* Check if the renumbering worked (this can fail when ParMETIS gives fewer partitions than there are processes) */
  failed = (PetscInt)(part[0] != rank);
  ierr = MPI_Allreduce(&failed, &failedGlobal, 1, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);

  ierr = PetscFree(firstVertices);CHKERRQ(ierr);
  ierr = PetscFree(renumbering);CHKERRQ(ierr);

  if (failedGlobal > 0) {
    /* Bail out: release everything allocated so far and return with
     * *success left at PETSC_FALSE; the SF is not touched in this case. */
    ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
    ierr = PetscFree(xadj);CHKERRQ(ierr);
    ierr = PetscFree(adjncy);CHKERRQ(ierr);
    ierr = PetscFree(vtxwgt);CHKERRQ(ierr);
    ierr = PetscFree(toBalance);CHKERRQ(ierr);
    ierr = PetscFree(isLeaf);CHKERRQ(ierr);
    ierr = PetscFree(isNonExclusivelyOwned);CHKERRQ(ierr);
    ierr = PetscFree(isExclusivelyOwned);CHKERRQ(ierr);
    ierr = PetscFree(part);CHKERRQ(ierr);
    if (viewer) {
      ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
      ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
    }
    ierr = PetscLogEventEnd(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /*Let's check how well we did distributing points*/
  if (viewer) {
    ierr = PetscViewerASCIIPrintf(viewer, "Comparing number of owned entities of depth %D on each process before rebalancing, after rebalancing, and after consistency checks.\n", entityDepth);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer, "Initial.     ");CHKERRQ(ierr);
    ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, NULL, viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer, "Rebalanced.  ");CHKERRQ(ierr);
    ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, part, viewer);CHKERRQ(ierr);
  }

  /* Now check that every vertex is owned by a process that it is actually connected to. */
  for (i=1; i<=numNonExclusivelyOwned; i++) {
    PetscInt loc = 0;
    /* adjncy rows are sorted matrix columns, so a binary search is valid here */
    ierr = PetscFindInt(cumSumVertices[part[i]], xadj[i+1]-xadj[i], &adjncy[xadj[i]], &loc);CHKERRQ(ierr);
    /* If not, then just set the owner to the original owner (hopefully a rare event, it means that a vertex has been isolated) */
    if (loc<0) {
      part[i] = rank;
    }
  }

  /* Let's see how significant the influences of the previous fixing up step was.*/
  if (viewer) {
    ierr = PetscViewerASCIIPrintf(viewer, "After.       ");CHKERRQ(ierr);
    ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, part, viewer);CHKERRQ(ierr);
  }

  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  ierr = PetscFree(xadj);CHKERRQ(ierr);
  ierr = PetscFree(adjncy);CHKERRQ(ierr);
  ierr = PetscFree(vtxwgt);CHKERRQ(ierr);

  /* Almost done, now rewrite the SF to reflect the new ownership. */
  {
    PetscInt *pointsToRewrite;
    ierr = PetscMalloc1(numNonExclusivelyOwned, &pointsToRewrite);CHKERRQ(ierr);
    counter = 0;
    for(i=0; i<pEnd-pStart; i++) {
      if (toBalance[i]) {
        if (isNonExclusivelyOwned[i]) {
          pointsToRewrite[counter] = i + pStart;
          counter++;
        }
      }
    }
    /* part+1 skips the lumped vertex; entries then align with pointsToRewrite */
    ierr = DMPlexRewriteSF(dm, numNonExclusivelyOwned, pointsToRewrite, part+1, degrees);CHKERRQ(ierr);
    ierr = PetscFree(pointsToRewrite);CHKERRQ(ierr);
  }

  ierr = PetscFree(toBalance);CHKERRQ(ierr);
  ierr = PetscFree(isLeaf);CHKERRQ(ierr);
  ierr = PetscFree(isNonExclusivelyOwned);CHKERRQ(ierr);
  ierr = PetscFree(isExclusivelyOwned);CHKERRQ(ierr);
  ierr = PetscFree(part);CHKERRQ(ierr);
  if (viewer) {
    ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
    ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  }
  if (success) *success = PETSC_TRUE;
  ierr = PetscLogEventEnd(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
#else
  SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
#endif
  PetscFunctionReturn(0);
}
3402