xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 30b0ce1b8d372b7ac467a0571ddcc09fc86b16af)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/hashseti.h>
3 
PetscClassId PETSCPARTITIONER_CLASSID = 0;  /* Class id for PetscPartitioner objects; assigned when the class is registered */

PetscFunctionList PetscPartitionerList              = NULL;        /* Registry of type name -> constructor, consulted by PetscPartitionerSetType() */
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE; /* Guards one-time registration of the built-in partitioner types */

PetscBool ChacoPartitionercite = PETSC_FALSE; /* Set once the Chaco citation below has been registered */
/* BibTeX entry registered with PetscCitationsRegister when the Chaco partitioner is used.
   Fix: the booktitle line was missing its trailing "\n", which glued the isbn field onto
   the same output line; every field now ends with an explicit newline. */
const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
                               "  author    = {Bruce Hendrickson and Robert Leland},\n"
                               "  title     = {A multilevel algorithm for partitioning graphs},\n"
                               "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},\n"
                               "  isbn      = {0-89791-816-9},\n"
                               "  pages     = {28},\n"
                               "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
                               "  publisher = {ACM Press},\n"
                               "  address   = {New York},\n"
                               "  year      = {1995}\n}\n";
20 
PetscBool ParMetisPartitionercite = PETSC_FALSE; /* Set once the ParMetis citation below has been registered */
/* BibTeX entry for the ParMetis algorithm paper; each field ends with an explicit newline */
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";
29 
30 PETSC_STATIC_INLINE PetscInt DMPlex_GlobalID(PetscInt point) { return point >= 0 ? point : -(point+1); }
31 
/* Build the cell-cell adjacency graph in CSR form by walking the mesh directly.

   Interpolated meshes use FVM (face-neighbor) adjacency; in parallel, neighbor cell
   ids are exchanged across the point SF so that edges to cells owned by other ranks
   appear in the graph.  Uninterpolated meshes are delegated to DMPlexCreateNeighborCSR
   (serial only).

   Input:
     dm     - the mesh
     height - stratum height of the points used as graph vertices (0 for cells)
   Output:
     numVertices     - number of locally owned graph vertices
     offsets         - CSR row offsets; caller frees with PetscFree()
     adjacency       - CSR columns in the global cell numbering; caller frees with PetscFree()
     globalNumbering - local-to-global cell numbering IS (negative entries mark unowned
                       copies); caller destroys */
static PetscErrorCode DMPlexCreatePartitionerGraph_Native(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
{
  PetscInt       dim, depth, p, pStart, pEnd, a, adjSize, idx, size;
  PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
  IS             cellNumbering;
  const PetscInt *cellNum;
  PetscBool      useCone, useClosure;
  PetscSection   section;
  PetscSegBuffer adjBuffer;
  PetscSF        sfPoint;
  PetscInt       *adjCells = NULL, *remoteCells = NULL;
  const PetscInt *local;
  PetscInt       nroots, nleaves, l;
  PetscMPIInt    rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  if (dim != depth) {
    /* We do not handle the uninterpolated case here */
    ierr = DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);CHKERRQ(ierr);
    /* DMPlexCreateNeighborCSR does not make a numbering */
    if (globalNumbering) {ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);CHKERRQ(ierr);}
    /* Different behavior for empty graphs */
    if (!*numVertices) {
      ierr = PetscMalloc1(1, offsets);CHKERRQ(ierr);
      (*offsets)[0] = 0;
    }
    /* Broken in parallel */
    if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported");
    PetscFunctionReturn(0);
  }
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
  /* Build adjacency graph via a section/segbuffer */
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
  /* Always use FVM adjacency to create partitioner graph; the user's setting is restored below */
  ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
  ierr = DMSetBasicAdjacency(dm, PETSC_TRUE, PETSC_FALSE);CHKERRQ(ierr);
  ierr = DMPlexCreateNumbering_Internal(dm, pStart, pEnd, 0, NULL, sfPoint, &cellNumbering);CHKERRQ(ierr);
  if (globalNumbering) {
    ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
    *globalNumbering = cellNumbering;
  }
  ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  /* For all boundary faces (including faces adjacent to a ghost cell), record the local cell in adjCells
     Broadcast adjCells to remoteCells (to get cells from roots) and Reduce adjCells to remoteCells (to get cells from leaves)
   */
  ierr = PetscSFGetGraph(sfPoint, &nroots, &nleaves, &local, NULL);CHKERRQ(ierr);
  if (nroots >= 0) {
    /* Parallel case: the point SF is set up, so exchange neighbor cell ids across ranks */
    PetscInt fStart, fEnd, f;

    ierr = PetscCalloc2(nroots, &adjCells, nroots, &remoteCells);CHKERRQ(ierr);
    ierr = DMPlexGetHeightStratum(dm, height+1, &fStart, &fEnd);CHKERRQ(ierr);
    for (l = 0; l < nroots; ++l) adjCells[l] = -3; /* -3 = no neighboring cell recorded for this point */
    for (f = fStart; f < fEnd; ++f) {
      const PetscInt *support;
      PetscInt        supportSize;

      ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
      ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
      if (supportSize == 1) adjCells[f] = DMPlex_GlobalID(cellNum[support[0]]);
      else if (supportSize == 2) {
        /* Interior face: if one support cell is a leaf (copy from another rank),
           record the global id of the other cell so the owner can pick it up */
        ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = DMPlex_GlobalID(cellNum[support[1]]);
        ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = DMPlex_GlobalID(cellNum[support[0]]);
      }
      /* Handle non-conforming meshes */
      if (supportSize > 2) {
        PetscInt        numChildren, i;
        const PetscInt *children;

        ierr = DMPlexGetTreeChildren(dm, f, &numChildren, &children);CHKERRQ(ierr);
        for (i = 0; i < numChildren; ++i) {
          const PetscInt child = children[i];
          if (fStart <= child && child < fEnd) {
            ierr = DMPlexGetSupport(dm, child, &support);CHKERRQ(ierr);
            ierr = DMPlexGetSupportSize(dm, child, &supportSize);CHKERRQ(ierr);
            if (supportSize == 1) adjCells[child] = DMPlex_GlobalID(cellNum[support[0]]);
            else if (supportSize == 2) {
              ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr);
              if (p >= 0) adjCells[child] = DMPlex_GlobalID(cellNum[support[1]]);
              ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr);
              if (p >= 0) adjCells[child] = DMPlex_GlobalID(cellNum[support[0]]);
            }
          }
        }
      }
    }
    /* Bcast pushes root values to leaves, Reduce(MAX) pulls leaf values back to roots,
       so afterwards remoteCells holds the off-rank neighbor id (or -1) at every point */
    for (l = 0; l < nroots; ++l) remoteCells[l] = -1;
    ierr = PetscSFBcastBegin(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
  }
  /* Combine local and global adjacencies */
  for (*numVertices = 0, p = pStart; p < pEnd; p++) {
    /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    /* Add remote cells */
    if (remoteCells) {
      const PetscInt gp = DMPlex_GlobalID(cellNum[p]);
      PetscInt       coneSize, numChildren, c, i;
      const PetscInt *cone, *children;

      ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
      ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
      for (c = 0; c < coneSize; ++c) {
        const PetscInt point = cone[c];
        if (remoteCells[point] >= 0 && remoteCells[point] != gp) {
          PetscInt *PETSC_RESTRICT pBuf;
          ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
          ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
          *pBuf = remoteCells[point];
        }
        /* Handle non-conforming meshes */
        ierr = DMPlexGetTreeChildren(dm, point, &numChildren, &children);CHKERRQ(ierr);
        for (i = 0; i < numChildren; ++i) {
          const PetscInt child = children[i];
          if (remoteCells[child] >= 0 && remoteCells[child] != gp) {
            PetscInt *PETSC_RESTRICT pBuf;
            ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
            ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
            *pBuf = remoteCells[child];
          }
        }
      }
    }
    /* Add local cells */
    adjSize = PETSC_DETERMINE;
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      const PetscInt point = adj[a];
      if (point != p && pStart <= point && point < pEnd) {
        PetscInt *PETSC_RESTRICT pBuf;
        ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
        ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
        *pBuf = DMPlex_GlobalID(cellNum[point]);
      }
    }
    (*numVertices)++;
  }
  ierr = PetscFree(adj);CHKERRQ(ierr);
  ierr = PetscFree2(adjCells, remoteCells);CHKERRQ(ierr);
  ierr = DMSetBasicAdjacency(dm, useCone, useClosure);CHKERRQ(ierr);

  /* Derive CSR graph from section/segbuffer */
  ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
  for (idx = 0, p = pStart; p < pEnd; p++) {
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
  }
  vOffsets[*numVertices] = size;
  ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);

  if (nroots >= 0) {
    /* Filter out duplicate edges using section/segbuffer: sort each row in place,
       drop duplicates, and rebuild the CSR from the compressed rows */
    ierr = PetscSectionReset(section);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(section, 0, *numVertices);CHKERRQ(ierr);
    for (p = 0; p < *numVertices; p++) {
      PetscInt start = vOffsets[p], end = vOffsets[p+1];
      PetscInt numEdges = end-start, *PETSC_RESTRICT edges;
      ierr = PetscSortRemoveDupsInt(&numEdges, &graph[start]);CHKERRQ(ierr);
      ierr = PetscSectionSetDof(section, p, numEdges);CHKERRQ(ierr);
      ierr = PetscSegBufferGetInts(adjBuffer, numEdges, &edges);CHKERRQ(ierr);
      ierr = PetscMemcpy(edges, &graph[start], numEdges*sizeof(*edges));CHKERRQ(ierr);
    }
    ierr = PetscFree(vOffsets);CHKERRQ(ierr);
    ierr = PetscFree(graph);CHKERRQ(ierr);
    /* Derive CSR graph from section/segbuffer */
    ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
    ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
    ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
    for (idx = 0, p = 0; p < *numVertices; p++) {
      ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
    }
    vOffsets[*numVertices] = size;
    ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
  } else {
    /* Sort adjacencies (not strictly necessary) */
    for (p = 0; p < *numVertices; p++) {
      PetscInt start = vOffsets[p], end = vOffsets[p+1];
      ierr = PetscSortInt(end-start, &graph[start]);CHKERRQ(ierr);
    }
  }

  if (offsets) *offsets = vOffsets;
  if (adjacency) *adjacency = graph;

  /* Cleanup */
  ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
  ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
235 
/* Build the cell-cell adjacency graph in CSR form via sparse matrix products.

   A face-by-cell connectivity matrix conn is assembled (conn(f,c) = 1 iff face f is
   in the cone of cell c, including non-conforming children), and the cell-cell graph
   is obtained as conn^T * conn; the local rows of that product give the CSR arrays,
   with self-connections stripped out (ParMetis does not like self-connectivity).
   Uninterpolated meshes are delegated to DMPlexCreateNeighborCSR (serial only).

   Input:
     dm     - the mesh
     height - stratum height of the points used as graph vertices (0 for cells)
   Output:
     numVertices     - number of locally owned graph vertices
     offsets         - CSR row offsets; caller frees with PetscFree()
     adjacency       - CSR columns in the global cell numbering; caller frees with PetscFree()
     globalNumbering - local-to-global cell numbering IS; caller destroys */
static PetscErrorCode DMPlexCreatePartitionerGraph_ViaMat(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
{
  Mat            conn, CSR;
  IS             fis, cis, cis_own;
  PetscSF        sfPoint;
  const PetscInt *rows, *cols, *ii, *jj;
  PetscInt       *idxs,*idxs2;
  PetscInt       dim, depth, floc, cloc, i, M, N, c, m, cStart, cEnd, fStart, fEnd;
  PetscMPIInt    rank;
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  if (dim != depth) {
    /* We do not handle the uninterpolated case here */
    ierr = DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);CHKERRQ(ierr);
    /* DMPlexCreateNeighborCSR does not make a numbering */
    if (globalNumbering) {ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);CHKERRQ(ierr);}
    /* Different behavior for empty graphs */
    if (!*numVertices) {
      ierr = PetscMalloc1(1, offsets);CHKERRQ(ierr);
      (*offsets)[0] = 0;
    }
    /* Broken in parallel */
    if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported");
    PetscFunctionReturn(0);
  }
  /* Interpolated and parallel case */

  /* numbering: global numberings of cells (cis, size N) and faces (fis, size M) */
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &cStart, &cEnd);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height+1, &fStart, &fEnd);CHKERRQ(ierr);
  ierr = DMPlexCreateNumbering_Internal(dm, cStart, cEnd, 0, &N, sfPoint, &cis);CHKERRQ(ierr);
  ierr = DMPlexCreateNumbering_Internal(dm, fStart, fEnd, 0, &M, sfPoint, &fis);CHKERRQ(ierr);
  if (globalNumbering) {
    ierr = ISDuplicate(cis, globalNumbering);CHKERRQ(ierr);
  }

  /* get positive global ids and local sizes for facets and cells;
     negative entries encode -(id+1) for points owned by another rank */
  ierr = ISGetLocalSize(fis, &m);CHKERRQ(ierr);
  ierr = ISGetIndices(fis, &rows);CHKERRQ(ierr);
  ierr = PetscMalloc1(m, &idxs);CHKERRQ(ierr);
  for (i = 0, floc = 0; i < m; i++) {
    const PetscInt p = rows[i];

    if (p < 0) {
      idxs[i] = -(p+1);
    } else {
      idxs[i] = p;
      floc   += 1; /* count of locally owned faces */
    }
  }
  ierr = ISRestoreIndices(fis, &rows);CHKERRQ(ierr);
  ierr = ISDestroy(&fis);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF, m, idxs, PETSC_OWN_POINTER, &fis);CHKERRQ(ierr);

  ierr = ISGetLocalSize(cis, &m);CHKERRQ(ierr);
  ierr = ISGetIndices(cis, &cols);CHKERRQ(ierr);
  ierr = PetscMalloc1(m, &idxs);CHKERRQ(ierr);
  ierr = PetscMalloc1(m, &idxs2);CHKERRQ(ierr);
  for (i = 0, cloc = 0; i < m; i++) {
    const PetscInt p = cols[i];

    if (p < 0) {
      idxs[i] = -(p+1);
    } else {
      idxs[i]       = p;
      idxs2[cloc++] = p; /* idxs2 keeps only the owned cells' global ids */
    }
  }
  ierr = ISRestoreIndices(cis, &cols);CHKERRQ(ierr);
  ierr = ISDestroy(&cis);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), m, idxs, PETSC_OWN_POINTER, &cis);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), cloc, idxs2, PETSC_OWN_POINTER, &cis_own);CHKERRQ(ierr);

  /* Create matrix to hold F-C connectivity (MatMatTranspose Mult not supported for MPIAIJ) */
  ierr = MatCreate(PetscObjectComm((PetscObject)dm), &conn);CHKERRQ(ierr);
  ierr = MatSetSizes(conn, floc, cloc, M, N);CHKERRQ(ierr);
  ierr = MatSetType(conn, MATMPIAIJ);CHKERRQ(ierr);
  ierr = DMPlexGetMaxSizes(dm, NULL, &m);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(conn, m, NULL, m, NULL);CHKERRQ(ierr);

  /* Assemble matrix: one entry per (face in cone of cell, cell) pair */
  ierr = ISGetIndices(fis, &rows);CHKERRQ(ierr);
  ierr = ISGetIndices(cis, &cols);CHKERRQ(ierr);
  for (c = cStart; c < cEnd; c++) {
    const PetscInt *cone;
    PetscInt        coneSize, row, col, f;

    col  = cols[c-cStart];
    ierr = DMPlexGetCone(dm, c, &cone);CHKERRQ(ierr);
    ierr = DMPlexGetConeSize(dm, c, &coneSize);CHKERRQ(ierr);
    for (f = 0; f < coneSize; f++) {
      const PetscScalar v = 1.0;
      const PetscInt *children;
      PetscInt        numChildren, ch;

      row  = rows[cone[f]-fStart];
      ierr = MatSetValues(conn, 1, &row, 1, &col, &v, INSERT_VALUES);CHKERRQ(ierr);

      /* non-conforming meshes */
      ierr = DMPlexGetTreeChildren(dm, cone[f], &numChildren, &children);CHKERRQ(ierr);
      for (ch = 0; ch < numChildren; ch++) {
        const PetscInt child = children[ch];

        if (child < fStart || child >= fEnd) continue;
        row  = rows[child-fStart];
        ierr = MatSetValues(conn, 1, &row, 1, &col, &v, INSERT_VALUES);CHKERRQ(ierr);
      }
    }
  }
  ierr = ISRestoreIndices(fis, &rows);CHKERRQ(ierr);
  ierr = ISRestoreIndices(cis, &cols);CHKERRQ(ierr);
  ierr = ISDestroy(&fis);CHKERRQ(ierr);
  ierr = ISDestroy(&cis);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(conn, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(conn, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  /* Get parallel CSR by doing conn^T * conn */
  ierr = MatTransposeMatMult(conn, conn, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &CSR);CHKERRQ(ierr);
  ierr = MatDestroy(&conn);CHKERRQ(ierr);

  /* extract local part of the CSR */
  ierr = MatMPIAIJGetLocalMat(CSR, MAT_INITIAL_MATRIX, &conn);CHKERRQ(ierr);
  ierr = MatDestroy(&CSR);CHKERRQ(ierr);
  ierr = MatGetRowIJ(conn, 0, PETSC_FALSE, PETSC_FALSE, &m, &ii, &jj, &flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "No IJ format");

  /* get back requested output; each row contains exactly one diagonal (self)
     entry, which is removed below */
  if (numVertices) *numVertices = m;
  if (offsets) {
    ierr = PetscCalloc1(m+1, &idxs);CHKERRQ(ierr);
    for (i = 1; i < m+1; i++) idxs[i] = ii[i] - i; /* ParMetis does not like self-connectivity */
    *offsets = idxs;
  }
  if (adjacency) {
    ierr = PetscMalloc1(ii[m] - m, &idxs);CHKERRQ(ierr);
    ierr = ISGetIndices(cis_own, &rows);CHKERRQ(ierr);
    for (i = 0, c = 0; i < m; i++) {
      PetscInt j, g = rows[i];

      for (j = ii[i]; j < ii[i+1]; j++) {
        if (jj[j] == g) continue; /* again, self-connectivity */
        idxs[c++] = jj[j];
      }
    }
    if (c != ii[m] - m) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected %D != %D",c,ii[m]-m);
    ierr = ISRestoreIndices(cis_own, &rows);CHKERRQ(ierr);
    *adjacency = idxs;
  }

  /* cleanup */
  ierr = ISDestroy(&cis_own);CHKERRQ(ierr);
  ierr = MatRestoreRowIJ(conn, 0, PETSC_FALSE, PETSC_FALSE, &m, &ii, &jj, &flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "No IJ format");
  ierr = MatDestroy(&conn);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
398 
399 /*@C
400   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
401 
402   Input Parameters:
+ dm      - The mesh DM
404 - height  - Height of the strata from which to construct the graph
405 
  Output Parameters:
407 + numVertices     - Number of vertices in the graph
408 . offsets         - Point offsets in the graph
409 . adjacency       - Point connectivity in the graph
410 - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.
411 
412   The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
413   representation on the mesh. If requested, globalNumbering needs to be destroyed by the caller; offsets and adjacency need to be freed with PetscFree().
414 
415   Level: developer
416 
417 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMSetAdjacency()
418 @*/
419 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
420 {
421   PetscErrorCode ierr;
422   PetscBool      usemat = PETSC_FALSE;
423 
424   PetscFunctionBegin;
425   ierr = PetscOptionsGetBool(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-dm_plex_csr_via_mat", &usemat, NULL);CHKERRQ(ierr);
426   if (usemat) {
427     ierr = DMPlexCreatePartitionerGraph_ViaMat(dm, height, numVertices, offsets, adjacency, globalNumbering);CHKERRQ(ierr);
428   } else {
429     ierr = DMPlexCreatePartitionerGraph_Native(dm, height, numVertices, offsets, adjacency, globalNumbering);CHKERRQ(ierr);
430   }
431   PetscFunctionReturn(0);
432 }
433 
434 /*@C
435   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
436 
437   Collective
438 
439   Input Arguments:
440 + dm - The DMPlex
441 - cellHeight - The height of mesh points to treat as cells (default should be 0)
442 
443   Output Arguments:
444 + numVertices - The number of local vertices in the graph, or cells in the mesh.
445 . offsets     - The offset to the adjacency list for each cell
446 - adjacency   - The adjacency list for all cells
447 
448   Note: This is suitable for input to a mesh partitioner like ParMetis.
449 
450   Level: advanced
451 
452 .seealso: DMPlexCreate()
453 @*/
454 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
455 {
456   const PetscInt maxFaceCases = 30;
457   PetscInt       numFaceCases = 0;
458   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
459   PetscInt      *off, *adj;
460   PetscInt      *neighborCells = NULL;
461   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
462   PetscErrorCode ierr;
463 
464   PetscFunctionBegin;
465   /* For parallel partitioning, I think you have to communicate supports */
466   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
467   cellDim = dim - cellHeight;
468   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
469   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
470   if (cEnd - cStart == 0) {
471     if (numVertices) *numVertices = 0;
472     if (offsets)   *offsets   = NULL;
473     if (adjacency) *adjacency = NULL;
474     PetscFunctionReturn(0);
475   }
476   numCells  = cEnd - cStart;
477   faceDepth = depth - cellHeight;
478   if (dim == depth) {
479     PetscInt f, fStart, fEnd;
480 
481     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
482     /* Count neighboring cells */
483     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
484     for (f = fStart; f < fEnd; ++f) {
485       const PetscInt *support;
486       PetscInt        supportSize;
487       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
488       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
489       if (supportSize == 2) {
490         PetscInt numChildren;
491 
492         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
493         if (!numChildren) {
494           ++off[support[0]-cStart+1];
495           ++off[support[1]-cStart+1];
496         }
497       }
498     }
499     /* Prefix sum */
500     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
501     if (adjacency) {
502       PetscInt *tmp;
503 
504       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
505       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
506       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
507       /* Get neighboring cells */
508       for (f = fStart; f < fEnd; ++f) {
509         const PetscInt *support;
510         PetscInt        supportSize;
511         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
512         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
513         if (supportSize == 2) {
514           PetscInt numChildren;
515 
516           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
517           if (!numChildren) {
518             adj[tmp[support[0]-cStart]++] = support[1];
519             adj[tmp[support[1]-cStart]++] = support[0];
520           }
521         }
522       }
523 #if defined(PETSC_USE_DEBUG)
524       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
525 #endif
526       ierr = PetscFree(tmp);CHKERRQ(ierr);
527     }
528     if (numVertices) *numVertices = numCells;
529     if (offsets)   *offsets   = off;
530     if (adjacency) *adjacency = adj;
531     PetscFunctionReturn(0);
532   }
533   /* Setup face recognition */
534   if (faceDepth == 1) {
535     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
536 
537     for (c = cStart; c < cEnd; ++c) {
538       PetscInt corners;
539 
540       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
541       if (!cornersSeen[corners]) {
542         PetscInt nFV;
543 
544         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
545         cornersSeen[corners] = 1;
546 
547         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
548 
549         numFaceVertices[numFaceCases++] = nFV;
550       }
551     }
552   }
553   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
554   /* Count neighboring cells */
555   for (cell = cStart; cell < cEnd; ++cell) {
556     PetscInt numNeighbors = PETSC_DETERMINE, n;
557 
558     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
559     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
560     for (n = 0; n < numNeighbors; ++n) {
561       PetscInt        cellPair[2];
562       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
563       PetscInt        meetSize = 0;
564       const PetscInt *meet    = NULL;
565 
566       cellPair[0] = cell; cellPair[1] = neighborCells[n];
567       if (cellPair[0] == cellPair[1]) continue;
568       if (!found) {
569         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
570         if (meetSize) {
571           PetscInt f;
572 
573           for (f = 0; f < numFaceCases; ++f) {
574             if (numFaceVertices[f] == meetSize) {
575               found = PETSC_TRUE;
576               break;
577             }
578           }
579         }
580         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
581       }
582       if (found) ++off[cell-cStart+1];
583     }
584   }
585   /* Prefix sum */
586   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
587 
588   if (adjacency) {
589     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
590     /* Get neighboring cells */
591     for (cell = cStart; cell < cEnd; ++cell) {
592       PetscInt numNeighbors = PETSC_DETERMINE, n;
593       PetscInt cellOffset   = 0;
594 
595       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
596       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
597       for (n = 0; n < numNeighbors; ++n) {
598         PetscInt        cellPair[2];
599         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
600         PetscInt        meetSize = 0;
601         const PetscInt *meet    = NULL;
602 
603         cellPair[0] = cell; cellPair[1] = neighborCells[n];
604         if (cellPair[0] == cellPair[1]) continue;
605         if (!found) {
606           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
607           if (meetSize) {
608             PetscInt f;
609 
610             for (f = 0; f < numFaceCases; ++f) {
611               if (numFaceVertices[f] == meetSize) {
612                 found = PETSC_TRUE;
613                 break;
614               }
615             }
616           }
617           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
618         }
619         if (found) {
620           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
621           ++cellOffset;
622         }
623       }
624     }
625   }
626   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
627   if (numVertices) *numVertices = numCells;
628   if (offsets)   *offsets   = off;
629   if (adjacency) *adjacency = adj;
630   PetscFunctionReturn(0);
631 }
632 
633 /*@C
634   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
635 
636   Not Collective
637 
638   Input Parameters:
639 + name        - The name of a new user-defined creation routine
640 - create_func - The creation routine itself
641 
642   Notes:
643   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
644 
645   Sample usage:
646 .vb
647     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
648 .ve
649 
650   Then, your PetscPartitioner type can be chosen with the procedural interface via
651 .vb
652     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
653     PetscPartitionerSetType(PetscPartitioner, "my_part");
654 .ve
655    or at runtime via the option
656 .vb
657     -petscpartitioner_type my_part
658 .ve
659 
660   Level: advanced
661 
662 .keywords: PetscPartitioner, register
663 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
664 
665 @*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Add the constructor to the global registry consulted by PetscPartitionerSetType() */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
674 
/*@C
  PetscPartitionerSetType - Builds a particular PetscPartitioner

  Collective on PetscPartitioner

  Input Parameters:
+ part - The PetscPartitioner object
- name - The kind of partitioner

  Options Database Key:
. -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types

  Level: intermediate

.keywords: PetscPartitioner, set, type
.seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
{
  PetscErrorCode (*r)(PetscPartitioner);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* No-op when the object already has the requested type */
  ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  /* Ensure all built-in implementations are registered before the lookup */
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);

  /* Tear down the previous implementation (if any) and reset the ops table
     before calling the type-specific constructor */
  if (part->ops->destroy) {
    ierr = (*part->ops->destroy)(part);CHKERRQ(ierr);
  }
  part->noGraph = PETSC_FALSE;
  ierr = PetscMemzero(part->ops, sizeof(struct _PetscPartitionerOps));CHKERRQ(ierr);
  ierr = (*r)(part);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
716 
/*@C
  PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.

  Not Collective

  Input Parameter:
. part - The PetscPartitioner

  Output Parameter:
. name - The PetscPartitioner type name

  Level: intermediate

.keywords: PetscPartitioner, get, type, name
.seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  /* Returns a borrowed reference to the header's type name; caller must not free it */
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
744 
745 /*@C
746   PetscPartitionerView - Views a PetscPartitioner
747 
748   Collective on PetscPartitioner
749 
  Input Parameters:
751 + part - the PetscPartitioner object to view
752 - v    - the viewer
753 
754   Level: developer
755 
756 .seealso: PetscPartitionerDestroy()
757 @*/
PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
{
  PetscMPIInt    size;
  PetscBool      isascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Default to stdout on the partitioner's communicator when no viewer is given */
  if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
  ierr = PetscObjectTypeCompare((PetscObject) v, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
  if (isascii) {
    ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v, "Graph Partitioner: %d MPI Process%s\n", size, size > 1 ? "es" : "");CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v, "  type: %s\n", part->hdr.type_name);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPushTab(v);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v, "edge cut: %D\n", part->edgeCut);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v, "balance:  %.2g\n", part->balance);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPopTab(v);CHKERRQ(ierr);
  }
  /* Delegate to the type-specific viewer, if the implementation provides one */
  if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
780 
781 static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType)
782 {
783   PetscFunctionBegin;
784   if (!currentType) {
785 #if defined(PETSC_HAVE_PARMETIS)
786     *defaultType = PETSCPARTITIONERPARMETIS;
787 #elif defined(PETSC_HAVE_PTSCOTCH)
788     *defaultType = PETSCPARTITIONERPTSCOTCH;
789 #elif defined(PETSC_HAVE_CHACO)
790     *defaultType = PETSCPARTITIONERCHACO;
791 #else
792     *defaultType = PETSCPARTITIONERSIMPLE;
793 #endif
794   } else {
795     *defaultType = currentType;
796   }
797   PetscFunctionReturn(0);
798 }
799 
/*@
  PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to set options for

  Level: developer

.seealso: PetscPartitionerView()
@*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  const char    *defaultType;
  char           name[256];
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  /* Default to the current type if set, otherwise to the build's preferred implementation */
  ierr = PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);CHKERRQ(ierr);
  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, sizeof(name), &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
  } else if (!((PetscObject) part)->type_name) {
    /* No type yet and none requested: install the default */
    ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
  }
  /* Let the implementation process its own options
     (PetscOptionsObject is declared by the PetscObjectOptionsBegin() macro) */
  if (part->ops->setfromoptions) {
    ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);
  }
  /* Replace any previously configured graph viewer with the one requested now */
  ierr = PetscViewerDestroy(&part->viewerGraph);CHKERRQ(ierr);
  ierr = PetscOptionsGetViewer(((PetscObject) part)->comm, ((PetscObject) part)->options, ((PetscObject) part)->prefix, "-petscpartitioner_view_graph", &part->viewerGraph, &part->formatGraph, &part->viewGraph);CHKERRQ(ierr);
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
840 
/*@C
  PetscPartitionerSetUp - Construct data structures for the PetscPartitioner

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to setup

  Level: developer

.seealso: PetscPartitionerView(), PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Setup is optional; only implementations that define it are called */
  if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
862 
/*@
  PetscPartitionerDestroy - Destroys a PetscPartitioner object

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to destroy

  Level: developer

.seealso: PetscPartitionerView()
@*/
PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*part) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);

  /* Only the last reference actually destroys the object; earlier callers
     just drop their reference and get a NULLed pointer back */
  if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
  ((PetscObject) (*part))->refct = 0;

  ierr = PetscViewerDestroy(&(*part)->viewerGraph);CHKERRQ(ierr);
  /* Let the implementation free its type-specific data before the header goes away */
  if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
  ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
891 
/*@
  PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the PetscPartitioner object

  Output Parameter:
. part - The PetscPartitioner object

  Level: beginner

.seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
@*/
PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
{
  PetscPartitioner p;
  const char       *partitionerType = NULL;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidPointer(part, 2);
  *part = NULL;
  ierr = DMInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
  /* Install the build-dependent default type so the object is usable without an explicit SetType() */
  ierr = PetscPartitionerGetDefaultType(NULL,&partitionerType);CHKERRQ(ierr);
  ierr = PetscPartitionerSetType(p,partitionerType);CHKERRQ(ierr);

  p->edgeCut = 0;
  p->balance = 0.0;

  *part = p;
  PetscFunctionReturn(0);
}
928 
929 /*@
930   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
931 
932   Collective on DM
933 
934   Input Parameters:
935 + part    - The PetscPartitioner
936 - dm      - The mesh DM
937 
938   Output Parameters:
939 + partSection     - The PetscSection giving the division of points by partition
940 - partition       - The list of points by partition
941 
942   Options Database:
943 . -petscpartitioner_view_graph - View the graph we are partitioning
944 
945   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
946 
947   Level: developer
948 
949 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
950 @*/
951 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
952 {
953   PetscMPIInt    size;
954   PetscErrorCode ierr;
955 
956   PetscFunctionBegin;
957   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
958   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
959   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
960   PetscValidPointer(partition, 5);
961   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
962   if (size == 1) {
963     PetscInt *points;
964     PetscInt  cStart, cEnd, c;
965 
966     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
967     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
968     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
969     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
970     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
971     for (c = cStart; c < cEnd; ++c) points[c] = c;
972     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
973     PetscFunctionReturn(0);
974   }
975   if (part->height == 0) {
976     PetscInt numVertices = 0;
977     PetscInt *start     = NULL;
978     PetscInt *adjacency = NULL;
979     IS       globalNumbering;
980 
981     if (!part->noGraph || part->viewGraph) {
982       ierr = DMPlexCreatePartitionerGraph(dm, part->height, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
983     } else { /* only compute the number of owned local vertices */
984       const PetscInt *idxs;
985       PetscInt       p, pStart, pEnd;
986 
987       ierr = DMPlexGetHeightStratum(dm, part->height, &pStart, &pEnd);CHKERRQ(ierr);
988       ierr = DMPlexCreateNumbering_Internal(dm, pStart, pEnd, 0, NULL, dm->sf, &globalNumbering);CHKERRQ(ierr);
989       ierr = ISGetIndices(globalNumbering, &idxs);CHKERRQ(ierr);
990       for (p = 0; p < pEnd - pStart; p++) numVertices += idxs[p] < 0 ? 0 : 1;
991       ierr = ISRestoreIndices(globalNumbering, &idxs);CHKERRQ(ierr);
992     }
993     if (part->viewGraph) {
994       PetscViewer viewer = part->viewerGraph;
995       PetscBool   isascii;
996       PetscInt    v, i;
997       PetscMPIInt rank;
998 
999       ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) viewer), &rank);CHKERRQ(ierr);
1000       ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
1001       if (isascii) {
1002         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
1003         ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]Nv: %D\n", rank, numVertices);CHKERRQ(ierr);
1004         for (v = 0; v < numVertices; ++v) {
1005           const PetscInt s = start[v];
1006           const PetscInt e = start[v+1];
1007 
1008           ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]  ", rank);CHKERRQ(ierr);
1009           for (i = s; i < e; ++i) {ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%D ", adjacency[i]);CHKERRQ(ierr);}
1010           ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%D-%D)\n", s, e);CHKERRQ(ierr);
1011         }
1012         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1013         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
1014       }
1015     }
1016     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no partitioning method");
1017     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
1018     ierr = PetscFree(start);CHKERRQ(ierr);
1019     ierr = PetscFree(adjacency);CHKERRQ(ierr);
1020     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
1021       const PetscInt *globalNum;
1022       const PetscInt *partIdx;
1023       PetscInt       *map, cStart, cEnd;
1024       PetscInt       *adjusted, i, localSize, offset;
1025       IS             newPartition;
1026 
1027       ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
1028       ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
1029       ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
1030       ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
1031       ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
1032       ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
1033       for (i = cStart, offset = 0; i < cEnd; i++) {
1034         if (globalNum[i - cStart] >= 0) map[offset++] = i;
1035       }
1036       for (i = 0; i < localSize; i++) {
1037         adjusted[i] = map[partIdx[i]];
1038       }
1039       ierr = PetscFree(map);CHKERRQ(ierr);
1040       ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
1041       ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
1042       ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
1043       ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
1044       ierr = ISDestroy(partition);CHKERRQ(ierr);
1045       *partition = newPartition;
1046     }
1047   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
1048   ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
1049   PetscFunctionReturn(0);
1050 }
1051 
1052 static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
1053 {
1054   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1055   PetscErrorCode          ierr;
1056 
1057   PetscFunctionBegin;
1058   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
1059   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
1060   ierr = PetscFree(p);CHKERRQ(ierr);
1061   PetscFunctionReturn(0);
1062 }
1063 
1064 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
1065 {
1066   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1067   PetscErrorCode          ierr;
1068 
1069   PetscFunctionBegin;
1070   if (p->random) {
1071     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1072     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
1073     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1074   }
1075   PetscFunctionReturn(0);
1076 }
1077 
1078 static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
1079 {
1080   PetscBool      iascii;
1081   PetscErrorCode ierr;
1082 
1083   PetscFunctionBegin;
1084   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1085   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1086   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1087   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
1088   PetscFunctionReturn(0);
1089 }
1090 
static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  /* Single shell-specific option: generate a random partition instead of a user-supplied one */
  ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner Shell Options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1102 
1103 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1104 {
1105   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1106   PetscInt                np;
1107   PetscErrorCode          ierr;
1108 
1109   PetscFunctionBegin;
1110   if (p->random) {
1111     PetscRandom r;
1112     PetscInt   *sizes, *points, v, p;
1113     PetscMPIInt rank;
1114 
1115     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1116     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
1117     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr);
1118     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
1119     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
1120     for (v = 0; v < numVertices; ++v) {points[v] = v;}
1121     for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);}
1122     for (v = numVertices-1; v > 0; --v) {
1123       PetscReal val;
1124       PetscInt  w, tmp;
1125 
1126       ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));CHKERRQ(ierr);
1127       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
1128       w    = PetscFloorReal(val);
1129       tmp       = points[v];
1130       points[v] = points[w];
1131       points[w] = tmp;
1132     }
1133     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
1134     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
1135     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
1136   }
1137   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
1138   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
1139   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
1140   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
1141   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
1142   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
1143   *partition = p->partition;
1144   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
1145   PetscFunctionReturn(0);
1146 }
1147 
/* Install the shell partitioner's function table */
static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->noGraph             = PETSC_TRUE; /* PetscPartitionerShell cannot overload the partition call, so it is safe for now */
  part->ops->view           = PetscPartitionerView_Shell;
  part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
  part->ops->destroy        = PetscPartitionerDestroy_Shell;
  part->ops->partition      = PetscPartitionerPartition_Shell;
  PetscFunctionReturn(0);
}
1158 
/*MC
  PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object whose partition is supplied by the user via PetscPartitionerShellSetPartition(), or generated randomly with -petscpartitioner_shell_random

  Level: intermediate

.seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
M*/
1166 
1167 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
1168 {
1169   PetscPartitioner_Shell *p;
1170   PetscErrorCode          ierr;
1171 
1172   PetscFunctionBegin;
1173   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1174   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1175   part->data = p;
1176 
1177   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
1178   p->random = PETSC_FALSE;
1179   PetscFunctionReturn(0);
1180 }
1181 
/*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh

  Collective on Part

  Input Parameters:
+ part   - The PetscPartitioner
. size   - The number of partitions
. sizes  - array of size size (or NULL) providing the number of points in each partition
- points - array of size sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)

  Level: developer

  Notes:

    It is safe to free the sizes and points arrays after use in this routine.

.seealso DMPlexDistribute(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscInt                proc, numPoints;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  if (sizes)  {PetscValidPointer(sizes, 3);}
  if (points) {PetscValidPointer(points, 4);}
  /* Discard any previously stored partition before installing the new one */
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
  if (sizes) {
    for (proc = 0; proc < size; ++proc) {
      ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
    }
  }
  ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
  /* With sizes == NULL the storage size is 0, so the stored partition is empty */
  ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1225 
/*@
  PetscPartitionerShellSetRandom - Set the flag to use a random partition

  Collective on Part

  Input Parameters:
+ part   - The PetscPartitioner
- random - The flag to use a random partition

  Level: intermediate

.seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* The random partition itself is generated lazily in PetscPartitionerPartition_Shell() */
  p->random = random;
  PetscFunctionReturn(0);
}
1248 
/*@
  PetscPartitionerShellGetRandom - get the flag to use a random partition

  Not Collective

  Input Parameter:
. part   - The PetscPartitioner

  Output Parameter:
. random - The flag to use a random partition

  Level: intermediate

.seealso PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(random, 2);
  *random = p->random;
  PetscFunctionReturn(0);
}
1274 
1275 static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
1276 {
1277   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
1278   PetscErrorCode          ierr;
1279 
1280   PetscFunctionBegin;
1281   ierr = PetscFree(p);CHKERRQ(ierr);
1282   PetscFunctionReturn(0);
1283 }
1284 
/* The simple partitioner has no type-specific state to report */
static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1290 
1291 static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
1292 {
1293   PetscBool      iascii;
1294   PetscErrorCode ierr;
1295 
1296   PetscFunctionBegin;
1297   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1298   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1299   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1300   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
1301   PetscFunctionReturn(0);
1302 }
1303 
/* Divide the global vertex sequence into nparts contiguous, nearly equal chunks.
   Vertices keep their local order, so the returned partition IS is the identity;
   only the per-part counts in partSection vary. */
static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)part);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial: the first numVertices % nparts parts get one extra vertex */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  } else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Prefix-sum the local vertex counts to get each rank's global range
       [myFirst, myLast] and the global total nvGlobal */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      PetscInt rem = nvGlobal % nparts;
      PetscInt pSmall = nvGlobal/nparts;
      PetscInt pBig = nvGlobal/nparts + 1;

      /* The first rem parts have pBig vertices each, the rest pSmall; find the
         first and last global parts overlapping this rank's range */
      if (rem) {
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        } else {
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        } else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      } else {
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      /* Each overlapping part is assigned the portion of [myFirst, myLast] it covers */
      for (np = firstPart; np <= lastPart; np++) {
        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1373 
/* Install the simple partitioner's function table; it never needs the adjacency graph */
static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->noGraph        = PETSC_TRUE;
  part->ops->view      = PetscPartitionerView_Simple;
  part->ops->destroy   = PetscPartitionerDestroy_Simple;
  part->ops->partition = PetscPartitionerPartition_Simple;
  PetscFunctionReturn(0);
}
1383 
/*MC
  PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object that divides the vertices into contiguous, nearly equal-sized chunks

  Level: intermediate

.seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
M*/
1391 
1392 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1393 {
1394   PetscPartitioner_Simple *p;
1395   PetscErrorCode           ierr;
1396 
1397   PetscFunctionBegin;
1398   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1399   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1400   part->data = p;
1401 
1402   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1403   PetscFunctionReturn(0);
1404 }
1405 
1406 static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1407 {
1408   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1409   PetscErrorCode          ierr;
1410 
1411   PetscFunctionBegin;
1412   ierr = PetscFree(p);CHKERRQ(ierr);
1413   PetscFunctionReturn(0);
1414 }
1415 
/* The gather partitioner has no type-specific state to report */
static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1421 
1422 static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1423 {
1424   PetscBool      iascii;
1425   PetscErrorCode ierr;
1426 
1427   PetscFunctionBegin;
1428   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1429   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1430   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1431   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1432   PetscFunctionReturn(0);
1433 }
1434 
1435 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1436 {
1437   PetscInt       np;
1438   PetscErrorCode ierr;
1439 
1440   PetscFunctionBegin;
1441   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1442   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1443   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1444   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1445   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1446   PetscFunctionReturn(0);
1447 }
1448 
/* Install the gather partitioner's function table; it never needs the adjacency graph */
static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->noGraph        = PETSC_TRUE;
  part->ops->view      = PetscPartitionerView_Gather;
  part->ops->destroy   = PetscPartitionerDestroy_Gather;
  part->ops->partition = PetscPartitionerPartition_Gather;
  PetscFunctionReturn(0);
}
1458 
/*MC
  PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object that assigns all vertices to partition 0

  Level: intermediate

.seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
M*/
1466 
1467 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1468 {
1469   PetscPartitioner_Gather *p;
1470   PetscErrorCode           ierr;
1471 
1472   PetscFunctionBegin;
1473   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1474   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1475   part->data = p;
1476 
1477   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1478   PetscFunctionReturn(0);
1479 }
1480 
1481 
1482 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1483 {
1484   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1485   PetscErrorCode          ierr;
1486 
1487   PetscFunctionBegin;
1488   ierr = PetscFree(p);CHKERRQ(ierr);
1489   PetscFunctionReturn(0);
1490 }
1491 
/* ASCII viewer for the Chaco partitioner: the implementation has no
   user-visible state, so there is intentionally nothing to print. */
static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1497 
1498 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1499 {
1500   PetscBool      iascii;
1501   PetscErrorCode ierr;
1502 
1503   PetscFunctionBegin;
1504   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1505   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1506   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1507   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1508   PetscFunctionReturn(0);
1509 }
1510 
#if defined(PETSC_HAVE_CHACO)
#if defined(PETSC_HAVE_UNISTD_H)
#include <unistd.h>
#endif
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
#include <chaco.h>
#else
/* Older versions of Chaco do not have an include file */
PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
                       float *ewgts, float *x, float *y, float *z, char *outassignname,
                       char *outfilename, short *assignment, int architecture, int ndims_tot,
                       int mesh_dims[3], double *goal, int global_method, int local_method,
                       int rqi_flag, int vmax, int ndims, double eigtol, long seed);
#endif
/* Chaco global flag: when nonzero, Chaco frees the caller-provided graph arrays itself.
   We set it to 0 below so PETSc retains ownership of start[]/adjacency[]. */
extern int FREE_GRAPH;
#endif
1527 
/* Partition the dual graph with the (sequential) Chaco library.
   The whole graph must live on a single process; in debug builds this is verified.
   On output, partSection counts the vertices assigned to each of the nparts
   partitions and partition lists the local vertex numbers grouped by partition. */
static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int           *assignment;              /* Output partition */
#else
  short int     *assignment;              /* Output partition */
#endif
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
#if defined (PETSC_USE_DEBUG)
  {
    int ival,isum;
    PetscBool distributed;

    /* Chaco is serial: error out if more than one rank has a nonempty local graph */
    ival = (numVertices > 0);
    ierr = MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);CHKERRQ(ierr);
    distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
    if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
  }
#endif
  /* Ranks with no vertices still participate: produce an empty section and IS */
  if (!numVertices) {
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco expects 1-based vertex numbering: shift adjacency in place, undone below */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  /* architecture == 1 => a 1-D mesh of nparts processors */
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    /* NOTE(review): dup/close/dup2 results are unchecked here (see TODO above) */
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  /* NOTE(review): start/adjacency are PetscInt* cast to int*; this assumes PetscInt
     is a 32-bit int — confirm that 64-bit-index builds disable the Chaco interface */
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    /* Drain Chaco's stdout into msgLog and restore the original stdout descriptor */
    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#else
  if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* Group vertex numbers by their assigned partition (quadratic scan; nvtxs is serial-sized) */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift so the caller's adjacency array is unchanged */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1648 
1649 static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1650 {
1651   PetscFunctionBegin;
1652   part->noGraph        = PETSC_FALSE;
1653   part->ops->view      = PetscPartitionerView_Chaco;
1654   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1655   part->ops->partition = PetscPartitionerPartition_Chaco;
1656   PetscFunctionReturn(0);
1657 }
1658 
1659 /*MC
1660   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1661 
1662   Level: intermediate
1663 
1664 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1665 M*/
1666 
1667 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1668 {
1669   PetscPartitioner_Chaco *p;
1670   PetscErrorCode          ierr;
1671 
1672   PetscFunctionBegin;
1673   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1674   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1675   part->data = p;
1676 
1677   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1678   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1679   PetscFunctionReturn(0);
1680 }
1681 
1682 static const char *ptypes[] = {"kway", "rb"};
1683 
1684 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1685 {
1686   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1687   PetscErrorCode             ierr;
1688 
1689   PetscFunctionBegin;
1690   ierr = PetscFree(p);CHKERRQ(ierr);
1691   PetscFunctionReturn(0);
1692 }
1693 
/* Print the ParMetis partitioner options (type, imbalance ratio, debug flag,
   random seed) to an ASCII viewer, indented one tab level. */
static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
  PetscErrorCode             ierr;

  PetscFunctionBegin;
  ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "ParMetis type: %s\n", ptypes[p->ptype]);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "load imbalance ratio %g\n", (double) p->imbalanceRatio);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "debug flag %D\n", p->debugFlag);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "random seed %D\n", p->randomSeed);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1708 
1709 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1710 {
1711   PetscBool      iascii;
1712   PetscErrorCode ierr;
1713 
1714   PetscFunctionBegin;
1715   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1716   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1717   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1718   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1719   PetscFunctionReturn(0);
1720 }
1721 
/* Register and process the command-line options controlling ParMetis behavior:
   partitioning scheme, load-imbalance limit, debug level, and random seed. */
static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
  PetscErrorCode            ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");CHKERRQ(ierr);
  /* p->ptype indexes into the ptypes[] table ("kway" or "rb") */
  ierr = PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-petscpartitioner_parmetis_imbalance_ratio", "Load imbalance ratio limit", "", p->imbalanceRatio, &p->imbalanceRatio, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-petscpartitioner_parmetis_debug", "Debugging flag", "", p->debugFlag, &p->debugFlag, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-petscpartitioner_parmetis_seed", "Random seed", "", p->randomSeed, &p->randomSeed, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1736 
1737 #if defined(PETSC_HAVE_PARMETIS)
1738 #include <parmetis.h>
1739 #endif
1740 
1741 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1742 {
1743 #if defined(PETSC_HAVE_PARMETIS)
1744   PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *) part->data;
1745   MPI_Comm       comm;
1746   PetscSection   section;
1747   PetscInt       nvtxs       = numVertices; /* The number of vertices in full graph */
1748   PetscInt      *vtxdist;                   /* Distribution of vertices across processes */
1749   PetscInt      *xadj        = start;       /* Start of edge list for each vertex */
1750   PetscInt      *adjncy      = adjacency;   /* Edge lists for all vertices */
1751   PetscInt      *vwgt        = NULL;        /* Vertex weights */
1752   PetscInt      *adjwgt      = NULL;        /* Edge weights */
1753   PetscInt       wgtflag     = 0;           /* Indicates which weights are present */
1754   PetscInt       numflag     = 0;           /* Indicates initial offset (0 or 1) */
1755   PetscInt       ncon        = 1;           /* The number of weights per vertex */
1756   PetscInt       metis_ptype = pm->ptype;   /* kway or recursive bisection */
1757   real_t        *tpwgts;                    /* The fraction of vertex weights assigned to each partition */
1758   real_t        *ubvec;                     /* The balance intolerance for vertex weights */
1759   PetscInt       options[64];               /* Options */
1760   /* Outputs */
1761   PetscInt       v, i, *assignment, *points;
1762   PetscMPIInt    size, rank, p;
1763   PetscErrorCode ierr;
1764 
1765   PetscFunctionBegin;
1766   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1767   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1768   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1769   /* Calculate vertex distribution */
1770   ierr = PetscMalloc5(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
1771   vtxdist[0] = 0;
1772   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1773   for (p = 2; p <= size; ++p) {
1774     vtxdist[p] += vtxdist[p-1];
1775   }
1776   /* Calculate partition weights */
1777   for (p = 0; p < nparts; ++p) {
1778     tpwgts[p] = 1.0/nparts;
1779   }
1780   ubvec[0] = pm->imbalanceRatio;
1781   /* Weight cells by dofs on cell by default */
1782   ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
1783   for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1784   if (section) {
1785     PetscInt cStart, cEnd, dof;
1786 
1787     /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1788     /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1789     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
1790     for (v = cStart; v < cStart + numVertices; ++v) {
1791       ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1792       vwgt[v-cStart] = PetscMax(dof, 1);
1793     }
1794   }
1795   wgtflag |= 2; /* have weights on graph vertices */
1796 
1797   if (nparts == 1) {
1798     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1799   } else {
1800     for (p = 0; !vtxdist[p+1] && p < size; ++p);
1801     if (vtxdist[p+1] == vtxdist[size]) {
1802       if (rank == p) {
1803         ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
1804         options[METIS_OPTION_DBGLVL] = pm->debugFlag;
1805         options[METIS_OPTION_SEED]   = pm->randomSeed;
1806         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
1807         if (metis_ptype == 1) {
1808           PetscStackPush("METIS_PartGraphRecursive");
1809           ierr = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1810           PetscStackPop;
1811           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
1812         } else {
1813           /*
1814            It would be nice to activate the two options below, but they would need some actual testing.
1815            - Turning on these options may exercise path of the METIS code that have bugs and may break production runs.
1816            - If CONTIG is set to 1, METIS will exit with error if the graph is disconnected, despite the manual saying the option is ignored in such case.
1817           */
1818           /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
1819           /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
1820           PetscStackPush("METIS_PartGraphKway");
1821           ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1822           PetscStackPop;
1823           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1824         }
1825       }
1826     } else {
1827       options[0] = 1; /*use options */
1828       options[1] = pm->debugFlag;
1829       options[2] = (pm->randomSeed == -1) ? 15 : pm->randomSeed; /* default is GLOBAL_SEED=15 from `libparmetis/defs.h` */
1830       PetscStackPush("ParMETIS_V3_PartKway");
1831       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &comm);
1832       PetscStackPop;
1833       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
1834     }
1835   }
1836   /* Convert to PetscSection+IS */
1837   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1838   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1839   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1840   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1841   for (p = 0, i = 0; p < nparts; ++p) {
1842     for (v = 0; v < nvtxs; ++v) {
1843       if (assignment[v] == p) points[i++] = v;
1844     }
1845   }
1846   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1847   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1848   ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
1849   PetscFunctionReturn(0);
1850 #else
1851   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1852 #endif
1853 }
1854 
1855 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1856 {
1857   PetscFunctionBegin;
1858   part->noGraph             = PETSC_FALSE;
1859   part->ops->view           = PetscPartitionerView_ParMetis;
1860   part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
1861   part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
1862   part->ops->partition      = PetscPartitionerPartition_ParMetis;
1863   PetscFunctionReturn(0);
1864 }
1865 
1866 /*MC
1867   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1868 
1869   Level: intermediate
1870 
1871 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1872 M*/
1873 
1874 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1875 {
1876   PetscPartitioner_ParMetis *p;
1877   PetscErrorCode          ierr;
1878 
1879   PetscFunctionBegin;
1880   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1881   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1882   part->data = p;
1883 
1884   p->ptype          = 0;
1885   p->imbalanceRatio = 1.05;
1886   p->debugFlag      = 0;
1887   p->randomSeed     = -1; /* defaults to GLOBAL_SEED=15 from `libparmetis/defs.h` */
1888 
1889   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1890   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1891   PetscFunctionReturn(0);
1892 }
1893 
1894 PetscBool PTScotchPartitionercite = PETSC_FALSE;
1895 const char PTScotchPartitionerCitation[] =
1896   "@article{PTSCOTCH,\n"
1897   "  author  = {C. Chevalier and F. Pellegrini},\n"
1898   "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
1899   "  journal = {Parallel Computing},\n"
1900   "  volume  = {34},\n"
1901   "  number  = {6},\n"
1902   "  pages   = {318--331},\n"
1903   "  year    = {2008},\n"
1904   "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
1905   "}\n";
1906 
1907 #if defined(PETSC_HAVE_PTSCOTCH)
1908 
1909 EXTERN_C_BEGIN
1910 #include <ptscotch.h>
1911 EXTERN_C_END
1912 
1913 #define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0)
1914 
1915 static int PTScotch_Strategy(PetscInt strategy)
1916 {
1917   switch (strategy) {
1918   case  0: return SCOTCH_STRATDEFAULT;
1919   case  1: return SCOTCH_STRATQUALITY;
1920   case  2: return SCOTCH_STRATSPEED;
1921   case  3: return SCOTCH_STRATBALANCE;
1922   case  4: return SCOTCH_STRATSAFETY;
1923   case  5: return SCOTCH_STRATSCALABILITY;
1924   case  6: return SCOTCH_STRATRECURSIVE;
1925   case  7: return SCOTCH_STRATREMAP;
1926   default: return SCOTCH_STRATDEFAULT;
1927   }
1928 }
1929 
1930 
1931 static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1932                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[])
1933 {
1934   SCOTCH_Graph   grafdat;
1935   SCOTCH_Strat   stradat;
1936   SCOTCH_Num     vertnbr = n;
1937   SCOTCH_Num     edgenbr = xadj[n];
1938   SCOTCH_Num*    velotab = vtxwgt;
1939   SCOTCH_Num*    edlotab = adjwgt;
1940   SCOTCH_Num     flagval = strategy;
1941   double         kbalval = imbalance;
1942   PetscErrorCode ierr;
1943 
1944   PetscFunctionBegin;
1945   {
1946     PetscBool flg = PETSC_TRUE;
1947     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
1948     if (!flg) velotab = NULL;
1949   }
1950   ierr = SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr);
1951   ierr = SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
1952   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1953   ierr = SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
1954 #if defined(PETSC_USE_DEBUG)
1955   ierr = SCOTCH_graphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1956 #endif
1957   ierr = SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
1958   SCOTCH_stratExit(&stradat);
1959   SCOTCH_graphExit(&grafdat);
1960   PetscFunctionReturn(0);
1961 }
1962 
1963 static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1964                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[], MPI_Comm comm)
1965 {
1966   PetscMPIInt     procglbnbr;
1967   PetscMPIInt     proclocnum;
1968   SCOTCH_Arch     archdat;
1969   SCOTCH_Dgraph   grafdat;
1970   SCOTCH_Dmapping mappdat;
1971   SCOTCH_Strat    stradat;
1972   SCOTCH_Num      vertlocnbr;
1973   SCOTCH_Num      edgelocnbr;
1974   SCOTCH_Num*     veloloctab = vtxwgt;
1975   SCOTCH_Num*     edloloctab = adjwgt;
1976   SCOTCH_Num      flagval = strategy;
1977   double          kbalval = imbalance;
1978   PetscErrorCode  ierr;
1979 
1980   PetscFunctionBegin;
1981   {
1982     PetscBool flg = PETSC_TRUE;
1983     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
1984     if (!flg) veloloctab = NULL;
1985   }
1986   ierr = MPI_Comm_size(comm, &procglbnbr);CHKERRQ(ierr);
1987   ierr = MPI_Comm_rank(comm, &proclocnum);CHKERRQ(ierr);
1988   vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
1989   edgelocnbr = xadj[vertlocnbr];
1990 
1991   ierr = SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr);
1992   ierr = SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr);
1993 #if defined(PETSC_USE_DEBUG)
1994   ierr = SCOTCH_dgraphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1995 #endif
1996   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1997   ierr = SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRQ(ierr);
1998   ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
1999   ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
2000   ierr = SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr);
2001 
2002   ierr = SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr);
2003   SCOTCH_dgraphMapExit(&grafdat, &mappdat);
2004   SCOTCH_archExit(&archdat);
2005   SCOTCH_stratExit(&stradat);
2006   SCOTCH_dgraphExit(&grafdat);
2007   PetscFunctionReturn(0);
2008 }
2009 
2010 #endif /* PETSC_HAVE_PTSCOTCH */
2011 
2012 static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
2013 {
2014   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
2015   PetscErrorCode             ierr;
2016 
2017   PetscFunctionBegin;
2018   ierr = PetscFree(p);CHKERRQ(ierr);
2019   PetscFunctionReturn(0);
2020 }
2021 
/* Print the PTScotch partitioner options (strategy name and imbalance ratio)
   to an ASCII viewer, indented one tab level. */
static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
  PetscErrorCode            ierr;

  PetscFunctionBegin;
  ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
  /* p->strategy indexes the PTScotchStrategyList name table declared elsewhere in this file */
  ierr = PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n",(double)p->imbalance);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2034 
2035 static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
2036 {
2037   PetscBool      iascii;
2038   PetscErrorCode ierr;
2039 
2040   PetscFunctionBegin;
2041   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
2042   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
2043   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
2044   if (iascii) {ierr = PetscPartitionerView_PTScotch_Ascii(part, viewer);CHKERRQ(ierr);}
2045   PetscFunctionReturn(0);
2046 }
2047 
/* Register and process the command-line options controlling PT-Scotch behavior:
   partitioning strategy (index into PTScotchStrategyList) and imbalance ratio. */
static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
  const char *const         *slist = PTScotchStrategyList;
  /* Number of available strategy names in the table */
  PetscInt                  nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0]));
  PetscBool                 flag;
  PetscErrorCode            ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");CHKERRQ(ierr);
  ierr = PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2063 
2064 static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
2065 {
2066 #if defined(PETSC_HAVE_PTSCOTCH)
2067   MPI_Comm       comm       = PetscObjectComm((PetscObject)part);
2068   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
2069   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
2070   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
2071   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
2072   PetscInt      *vwgt       = NULL;        /* Vertex weights */
2073   PetscInt      *adjwgt     = NULL;        /* Edge weights */
2074   PetscInt       v, i, *assignment, *points;
2075   PetscMPIInt    size, rank, p;
2076   PetscErrorCode ierr;
2077 
2078   PetscFunctionBegin;
2079   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2080   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2081   ierr = PetscMalloc2(size+1,&vtxdist,PetscMax(nvtxs,1),&assignment);CHKERRQ(ierr);
2082 
2083   /* Calculate vertex distribution */
2084   vtxdist[0] = 0;
2085   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
2086   for (p = 2; p <= size; ++p) {
2087     vtxdist[p] += vtxdist[p-1];
2088   }
2089 
2090   if (nparts == 1) {
2091     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
2092   } else { /* Weight cells by dofs on cell by default */
2093     PetscSection section;
2094 
2095     /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
2096     /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
2097     ierr = PetscMalloc1(PetscMax(nvtxs,1),&vwgt);CHKERRQ(ierr);
2098     for (v = 0; v < PetscMax(nvtxs,1); ++v) vwgt[v] = 1;
2099     ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
2100     if (section) {
2101       PetscInt vStart, vEnd, dof;
2102       ierr = DMPlexGetHeightStratum(dm, part->height, &vStart, &vEnd);CHKERRQ(ierr);
2103       for (v = vStart; v < vStart + numVertices; ++v) {
2104         ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
2105         vwgt[v-vStart] = PetscMax(dof, 1);
2106       }
2107     }
2108     {
2109       PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
2110       int                       strat = PTScotch_Strategy(pts->strategy);
2111       double                    imbal = (double)pts->imbalance;
2112 
2113       for (p = 0; !vtxdist[p+1] && p < size; ++p);
2114       if (vtxdist[p+1] == vtxdist[size]) {
2115         if (rank == p) {
2116           ierr = PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, assignment);CHKERRQ(ierr);
2117         }
2118       } else {
2119         ierr = PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, assignment, comm);CHKERRQ(ierr);
2120       }
2121     }
2122     ierr = PetscFree(vwgt);CHKERRQ(ierr);
2123   }
2124 
2125   /* Convert to PetscSection+IS */
2126   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
2127   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
2128   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
2129   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
2130   for (p = 0, i = 0; p < nparts; ++p) {
2131     for (v = 0; v < nvtxs; ++v) {
2132       if (assignment[v] == p) points[i++] = v;
2133     }
2134   }
2135   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
2136   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
2137 
2138   ierr = PetscFree2(vtxdist,assignment);CHKERRQ(ierr);
2139   PetscFunctionReturn(0);
2140 #else
2141   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
2142 #endif
2143 }
2144 
2145 static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
2146 {
2147   PetscFunctionBegin;
2148   part->noGraph             = PETSC_FALSE;
2149   part->ops->view           = PetscPartitionerView_PTScotch;
2150   part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
2151   part->ops->partition      = PetscPartitionerPartition_PTScotch;
2152   part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
2153   PetscFunctionReturn(0);
2154 }
2155 
2156 /*MC
2157   PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library
2158 
2159   Level: intermediate
2160 
2161 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
2162 M*/
2163 
2164 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
2165 {
2166   PetscPartitioner_PTScotch *p;
2167   PetscErrorCode          ierr;
2168 
2169   PetscFunctionBegin;
2170   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
2171   ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
2172   part->data = p;
2173 
2174   p->strategy  = 0;
2175   p->imbalance = 0.01;
2176 
2177   ierr = PetscPartitionerInitialize_PTScotch(part);CHKERRQ(ierr);
2178   ierr = PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);CHKERRQ(ierr);
2179   PetscFunctionReturn(0);
2180 }
2181 
2182 
2183 /*@
2184   DMPlexGetPartitioner - Get the mesh partitioner
2185 
2186   Not collective
2187 
2188   Input Parameter:
2189 . dm - The DM
2190 
2191   Output Parameter:
2192 . part - The PetscPartitioner
2193 
2194   Level: developer
2195 
2196   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
2197 
2198 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
2199 @*/
2200 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
2201 {
2202   DM_Plex       *mesh = (DM_Plex *) dm->data;
2203 
2204   PetscFunctionBegin;
2205   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2206   PetscValidPointer(part, 2);
2207   *part = mesh->partitioner;
2208   PetscFunctionReturn(0);
2209 }
2210 
2211 /*@
2212   DMPlexSetPartitioner - Set the mesh partitioner
2213 
2214   logically collective on dm and part
2215 
2216   Input Parameters:
2217 + dm - The DM
2218 - part - The partitioner
2219 
2220   Level: developer
2221 
2222   Note: Any existing PetscPartitioner will be destroyed.
2223 
2224 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
2225 @*/
2226 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
2227 {
2228   DM_Plex       *mesh = (DM_Plex *) dm->data;
2229   PetscErrorCode ierr;
2230 
2231   PetscFunctionBegin;
2232   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2233   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
2234   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
2235   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
2236   mesh->partitioner = part;
2237   PetscFunctionReturn(0);
2238 }
2239 
2240 static PetscErrorCode DMPlexAddClosure_Tree(DM dm, PetscHSetI ht, PetscInt point, PetscBool up, PetscBool down)
2241 {
2242   PetscErrorCode ierr;
2243 
2244   PetscFunctionBegin;
2245   if (up) {
2246     PetscInt parent;
2247 
2248     ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
2249     if (parent != point) {
2250       PetscInt closureSize, *closure = NULL, i;
2251 
2252       ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
2253       for (i = 0; i < closureSize; i++) {
2254         PetscInt cpoint = closure[2*i];
2255 
2256         ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
2257         ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
2258       }
2259       ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
2260     }
2261   }
2262   if (down) {
2263     PetscInt numChildren;
2264     const PetscInt *children;
2265 
2266     ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
2267     if (numChildren) {
2268       PetscInt i;
2269 
2270       for (i = 0; i < numChildren; i++) {
2271         PetscInt cpoint = children[i];
2272 
2273         ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
2274         ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
2275       }
2276     }
2277   }
2278   PetscFunctionReturn(0);
2279 }
2280 
2281 PETSC_INTERN PetscErrorCode DMPlexPartitionLabelClosure_Private(DM,DMLabel,PetscInt,PetscInt,const PetscInt[],IS*);
2282 
2283 PetscErrorCode DMPlexPartitionLabelClosure_Private(DM dm, DMLabel label, PetscInt rank, PetscInt numPoints, const PetscInt points[], IS *closureIS)
2284 {
2285   DM_Plex        *mesh = (DM_Plex *)dm->data;
2286   PetscBool      hasTree = (mesh->parentSection || mesh->childSection) ? PETSC_TRUE : PETSC_FALSE;
2287   PetscInt       *closure = NULL, closureSize, nelems, *elems, off = 0, p, c;
2288   PetscHSetI     ht;
2289   PetscErrorCode ierr;
2290 
2291   PetscFunctionBegin;
2292   ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
2293   ierr = PetscHSetIResize(ht, numPoints*16);CHKERRQ(ierr);
2294   for (p = 0; p < numPoints; ++p) {
2295     ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
2296     for (c = 0; c < closureSize*2; c += 2) {
2297       ierr = PetscHSetIAdd(ht, closure[c]);CHKERRQ(ierr);
2298       if (hasTree) {ierr = DMPlexAddClosure_Tree(dm, ht, closure[c], PETSC_TRUE, PETSC_TRUE);CHKERRQ(ierr);}
2299     }
2300   }
2301   if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure);CHKERRQ(ierr);}
2302   ierr = PetscHSetIGetSize(ht, &nelems);CHKERRQ(ierr);
2303   ierr = PetscMalloc1(nelems, &elems);CHKERRQ(ierr);
2304   ierr = PetscHSetIGetElems(ht, &off, elems);CHKERRQ(ierr);
2305   ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
2306   ierr = PetscSortInt(nelems, elems);CHKERRQ(ierr);
2307   ierr = ISCreateGeneral(PETSC_COMM_SELF, nelems, elems, PETSC_OWN_POINTER, closureIS);CHKERRQ(ierr);
2308   PetscFunctionReturn(0);
2309 }
2310 
2311 /*@
2312   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
2313 
2314   Input Parameters:
2315 + dm     - The DM
2316 - label  - DMLabel assinging ranks to remote roots
2317 
2318   Level: developer
2319 
2320 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2321 @*/
2322 PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
2323 {
2324   IS              rankIS,   pointIS, closureIS;
2325   const PetscInt *ranks,   *points;
2326   PetscInt        numRanks, numPoints, r;
2327   PetscErrorCode  ierr;
2328 
2329   PetscFunctionBegin;
2330   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2331   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2332   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2333   for (r = 0; r < numRanks; ++r) {
2334     const PetscInt rank = ranks[r];
2335     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2336     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2337     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2338     ierr = DMPlexPartitionLabelClosure_Private(dm, label, rank, numPoints, points, &closureIS);CHKERRQ(ierr);
2339     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2340     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2341     ierr = DMLabelSetStratumIS(label, rank, closureIS);CHKERRQ(ierr);
2342     ierr = ISDestroy(&closureIS);CHKERRQ(ierr);
2343   }
2344   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2345   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2346   PetscFunctionReturn(0);
2347 }
2348 
2349 /*@
2350   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
2351 
2352   Input Parameters:
2353 + dm     - The DM
2354 - label  - DMLabel assinging ranks to remote roots
2355 
2356   Level: developer
2357 
2358 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2359 @*/
2360 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
2361 {
2362   IS              rankIS,   pointIS;
2363   const PetscInt *ranks,   *points;
2364   PetscInt        numRanks, numPoints, r, p, a, adjSize;
2365   PetscInt       *adj = NULL;
2366   PetscErrorCode  ierr;
2367 
2368   PetscFunctionBegin;
2369   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2370   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2371   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2372   for (r = 0; r < numRanks; ++r) {
2373     const PetscInt rank = ranks[r];
2374 
2375     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2376     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2377     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2378     for (p = 0; p < numPoints; ++p) {
2379       adjSize = PETSC_DETERMINE;
2380       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
2381       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
2382     }
2383     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2384     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2385   }
2386   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2387   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2388   ierr = PetscFree(adj);CHKERRQ(ierr);
2389   PetscFunctionReturn(0);
2390 }
2391 
2392 /*@
2393   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
2394 
2395   Input Parameters:
2396 + dm     - The DM
2397 - label  - DMLabel assinging ranks to remote roots
2398 
2399   Level: developer
2400 
2401   Note: This is required when generating multi-level overlaps to capture
2402   overlap points from non-neighbouring partitions.
2403 
2404 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2405 @*/
2406 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
2407 {
2408   MPI_Comm        comm;
2409   PetscMPIInt     rank;
2410   PetscSF         sfPoint;
2411   DMLabel         lblRoots, lblLeaves;
2412   IS              rankIS, pointIS;
2413   const PetscInt *ranks;
2414   PetscInt        numRanks, r;
2415   PetscErrorCode  ierr;
2416 
2417   PetscFunctionBegin;
2418   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2419   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2420   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
2421   /* Pull point contributions from remote leaves into local roots */
2422   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
2423   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
2424   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2425   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2426   for (r = 0; r < numRanks; ++r) {
2427     const PetscInt remoteRank = ranks[r];
2428     if (remoteRank == rank) continue;
2429     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
2430     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2431     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2432   }
2433   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2434   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2435   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
2436   /* Push point contributions from roots into remote leaves */
2437   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
2438   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
2439   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2440   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2441   for (r = 0; r < numRanks; ++r) {
2442     const PetscInt remoteRank = ranks[r];
2443     if (remoteRank == rank) continue;
2444     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
2445     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2446     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2447   }
2448   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2449   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2450   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
2451   PetscFunctionReturn(0);
2452 }
2453 
2454 /*@
2455   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
2456 
2457   Input Parameters:
2458 + dm        - The DM
2459 . rootLabel - DMLabel assinging ranks to local roots
2460 . processSF - A star forest mapping into the local index on each remote rank
2461 
2462   Output Parameter:
2463 - leafLabel - DMLabel assinging ranks to remote roots
2464 
2465   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
2466   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
2467 
2468   Level: developer
2469 
2470 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2471 @*/
2472 PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
2473 {
2474   MPI_Comm           comm;
2475   PetscMPIInt        rank, size, r;
2476   PetscInt           p, n, numNeighbors, numPoints, dof, off, rootSize, l, nleaves, leafSize;
2477   PetscSF            sfPoint;
2478   PetscSection       rootSection;
2479   PetscSFNode       *rootPoints, *leafPoints;
2480   const PetscSFNode *remote;
2481   const PetscInt    *local, *neighbors;
2482   IS                 valueIS;
2483   PetscBool          mpiOverflow = PETSC_FALSE;
2484   PetscErrorCode     ierr;
2485 
2486   PetscFunctionBegin;
2487   ierr = PetscLogEventBegin(DMPLEX_PartLabelInvert,dm,0,0,0);CHKERRQ(ierr);
2488   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2489   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2490   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2491   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
2492 
2493   /* Convert to (point, rank) and use actual owners */
2494   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
2495   ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
2496   ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
2497   ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
2498   ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
2499   for (n = 0; n < numNeighbors; ++n) {
2500     ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
2501     ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
2502   }
2503   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
2504   ierr = PetscSectionGetStorageSize(rootSection, &rootSize);CHKERRQ(ierr);
2505   ierr = PetscMalloc1(rootSize, &rootPoints);CHKERRQ(ierr);
2506   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
2507   for (n = 0; n < numNeighbors; ++n) {
2508     IS              pointIS;
2509     const PetscInt *points;
2510 
2511     ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
2512     ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
2513     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2514     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2515     for (p = 0; p < numPoints; ++p) {
2516       if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
2517       else       {l = -1;}
2518       if (l >= 0) {rootPoints[off+p] = remote[l];}
2519       else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
2520     }
2521     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2522     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2523   }
2524 
2525   /* Try to communicate overlap using All-to-All */
2526   if (!processSF) {
2527     PetscInt64  counter = 0;
2528     PetscBool   locOverflow = PETSC_FALSE;
2529     PetscMPIInt *scounts, *sdispls, *rcounts, *rdispls;
2530 
2531     ierr = PetscCalloc4(size, &scounts, size, &sdispls, size, &rcounts, size, &rdispls);CHKERRQ(ierr);
2532     for (n = 0; n < numNeighbors; ++n) {
2533       ierr = PetscSectionGetDof(rootSection, neighbors[n], &dof);CHKERRQ(ierr);
2534       ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
2535 #if defined(PETSC_USE_64BIT_INDICES)
2536       if (dof > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
2537       if (off > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
2538 #endif
2539       scounts[neighbors[n]] = (PetscMPIInt) dof;
2540       sdispls[neighbors[n]] = (PetscMPIInt) off;
2541     }
2542     ierr = MPI_Alltoall(scounts, 1, MPI_INT, rcounts, 1, MPI_INT, comm);CHKERRQ(ierr);
2543     for (r = 0; r < size; ++r) { rdispls[r] = (int)counter; counter += rcounts[r]; }
2544     if (counter > PETSC_MPI_INT_MAX) locOverflow = PETSC_TRUE;
2545     ierr = MPI_Allreduce(&locOverflow, &mpiOverflow, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
2546     if (!mpiOverflow) {
2547       leafSize = (PetscInt) counter;
2548       ierr = PetscMalloc1(leafSize, &leafPoints);CHKERRQ(ierr);
2549       ierr = MPI_Alltoallv(rootPoints, scounts, sdispls, MPIU_2INT, leafPoints, rcounts, rdispls, MPIU_2INT, comm);CHKERRQ(ierr);
2550     }
2551     ierr = PetscFree4(scounts, sdispls, rcounts, rdispls);CHKERRQ(ierr);
2552   }
2553 
2554   /* Communicate overlap using process star forest */
2555   if (processSF || mpiOverflow) {
2556     PetscSF      procSF;
2557     PetscSFNode  *remote;
2558     PetscSection leafSection;
2559 
2560     if (processSF) {
2561       ierr = PetscObjectReference((PetscObject)processSF);CHKERRQ(ierr);
2562       procSF = processSF;
2563     } else {
2564       ierr = PetscMalloc1(size, &remote);CHKERRQ(ierr);
2565       for (r = 0; r < size; ++r) { remote[r].rank  = r; remote[r].index = rank; }
2566       ierr = PetscSFCreate(comm, &procSF);CHKERRQ(ierr);
2567       ierr = PetscSFSetGraph(procSF, size, size, NULL, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
2568     }
2569 
2570     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dm), &leafSection);CHKERRQ(ierr);
2571     ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
2572     ierr = PetscSectionGetStorageSize(leafSection, &leafSize);CHKERRQ(ierr);
2573     ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
2574     ierr = PetscSFDestroy(&procSF);CHKERRQ(ierr);
2575   }
2576 
2577   for (p = 0; p < leafSize; p++) {
2578     ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
2579   }
2580 
2581   ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
2582   ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
2583   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
2584   ierr = PetscFree(rootPoints);CHKERRQ(ierr);
2585   ierr = PetscFree(leafPoints);CHKERRQ(ierr);
2586   ierr = PetscLogEventEnd(DMPLEX_PartLabelInvert,dm,0,0,0);CHKERRQ(ierr);
2587   PetscFunctionReturn(0);
2588 }
2589 
2590 /*@
2591   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
2592 
2593   Input Parameters:
2594 + dm    - The DM
2595 . label - DMLabel assinging ranks to remote roots
2596 
2597   Output Parameter:
2598 - sf    - The star forest communication context encapsulating the defined mapping
2599 
2600   Note: The incoming label is a receiver mapping of remote points to their parent rank.
2601 
2602   Level: developer
2603 
2604 .seealso: DMPlexDistribute(), DMPlexCreateOverlap()
2605 @*/
2606 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
2607 {
2608   PetscMPIInt     rank, size;
2609   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
2610   PetscSFNode    *remotePoints;
2611   IS              remoteRootIS;
2612   const PetscInt *remoteRoots;
2613   PetscErrorCode ierr;
2614 
2615   PetscFunctionBegin;
2616   ierr = PetscLogEventBegin(DMPLEX_PartLabelCreateSF,dm,0,0,0);CHKERRQ(ierr);
2617   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
2618   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
2619 
2620   for (numRemote = 0, n = 0; n < size; ++n) {
2621     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
2622     numRemote += numPoints;
2623   }
2624   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
2625   /* Put owned points first */
2626   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
2627   if (numPoints > 0) {
2628     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
2629     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2630     for (p = 0; p < numPoints; p++) {
2631       remotePoints[idx].index = remoteRoots[p];
2632       remotePoints[idx].rank = rank;
2633       idx++;
2634     }
2635     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2636     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
2637   }
2638   /* Now add remote points */
2639   for (n = 0; n < size; ++n) {
2640     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
2641     if (numPoints <= 0 || n == rank) continue;
2642     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
2643     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2644     for (p = 0; p < numPoints; p++) {
2645       remotePoints[idx].index = remoteRoots[p];
2646       remotePoints[idx].rank = n;
2647       idx++;
2648     }
2649     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2650     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
2651   }
2652   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
2653   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
2654   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
2655   ierr = PetscSFSetUp(*sf);CHKERRQ(ierr);
2656   ierr = PetscLogEventEnd(DMPLEX_PartLabelCreateSF,dm,0,0,0);CHKERRQ(ierr);
2657   PetscFunctionReturn(0);
2658 }
2659 
2660 /* The two functions below are used by DMPlexRebalanceSharedPoints which errors
2661  * when PETSc is built without ParMETIS. To avoid -Wunused-function, we take
2662  * them out in that case. */
2663 #if defined(PETSC_HAVE_PARMETIS)
/*@C

  DMPlexRewriteSF - Rewrites the ownership of the SF of a DM (in place).

  Input parameters:
+ dm                - The DMPlex object.
. n                 - The number of points.
. pointsToRewrite   - The points in the SF whose ownership will change.
. targetOwners      - New owner for each element in pointsToRewrite.
- degrees           - Degrees of the points in the SF as obtained by PetscSFComputeDegreeBegin/PetscSFComputeDegreeEnd.

  Level: developer

@*/
static PetscErrorCode DMPlexRewriteSF(DM dm, PetscInt n, PetscInt *pointsToRewrite, PetscInt *targetOwners, const PetscInt *degrees)
{
  PetscInt      ierr, pStart, pEnd, i, j, counter, leafCounter, sumDegrees, nroots, nleafs;
  PetscInt     *cumSumDegrees, *newOwners, *newNumbers, *rankOnLeafs, *locationsOfLeafs, *remoteLocalPointOfLeafs, *points, *leafsNew;
  PetscSFNode  *leafLocationsNew;
  const         PetscSFNode *iremote;
  const         PetscInt *ilocal;
  PetscBool    *isLeaf;
  PetscSF       sf;
  MPI_Comm      comm;
  PetscMPIInt   rank, size;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);

  /* Mark which local points are SF leaves (owned by another process).
     NOTE(review): assumes ilocal is non-NULL (explicit leaf list) — confirm
     the point SF is never created with a contiguous (NULL) leaf array here. */
  ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sf, &nroots, &nleafs, &ilocal, &iremote); CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &isLeaf);CHKERRQ(ierr);
  for (i=0; i<pEnd-pStart; i++) {
    isLeaf[i] = PETSC_FALSE;
  }
  for (i=0; i<nleafs; i++) {
    isLeaf[ilocal[i]-pStart] = PETSC_TRUE;
  }

  /* Prefix sum of root degrees: cumSumDegrees[p] is the offset of point p's
     leaves in the gathered per-leaf arrays below */
  ierr = PetscMalloc1(pEnd-pStart+1, &cumSumDegrees);CHKERRQ(ierr);
  cumSumDegrees[0] = 0;
  for (i=1; i<=pEnd-pStart; i++) {
    cumSumDegrees[i] = cumSumDegrees[i-1] + degrees[i-1];
  }
  sumDegrees = cumSumDegrees[pEnd-pStart];
  /* get the location of my leafs (we have sumDegrees many leafs pointing at our roots) */

  ierr = PetscMalloc1(sumDegrees, &locationsOfLeafs);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &rankOnLeafs);CHKERRQ(ierr);
  for (i=0; i<pEnd-pStart; i++) {
    rankOnLeafs[i] = rank;
  }
  /* Gather each leaf's owning rank onto the root: locationsOfLeafs[off..] lists
     the ranks that hold a leaf of each of our roots */
  ierr = PetscSFGatherBegin(sf, MPIU_INT, rankOnLeafs, locationsOfLeafs);CHKERRQ(ierr);
  ierr = PetscSFGatherEnd(sf, MPIU_INT, rankOnLeafs, locationsOfLeafs);CHKERRQ(ierr);
  ierr = PetscFree(rankOnLeafs);CHKERRQ(ierr);

  /* get the remote local points of my leaves */
  ierr = PetscMalloc1(sumDegrees, &remoteLocalPointOfLeafs);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &points);CHKERRQ(ierr);
  for (i=0; i<pEnd-pStart; i++) {
    points[i] = pStart+i;
  }
  /* Gather each leaf's local point number on its owning rank, parallel to
     locationsOfLeafs above */
  ierr = PetscSFGatherBegin(sf, MPIU_INT, points, remoteLocalPointOfLeafs);CHKERRQ(ierr);
  ierr = PetscSFGatherEnd(sf, MPIU_INT, points, remoteLocalPointOfLeafs);CHKERRQ(ierr);
  ierr = PetscFree(points);CHKERRQ(ierr);
  /* Figure out the new owners of the vertices that are up for grabs and their numbers on the new owners */
  ierr = PetscMalloc1(pEnd-pStart, &newOwners);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &newNumbers);CHKERRQ(ierr);
  for (i=0; i<pEnd-pStart; i++) {
    /* -1 means "ownership unchanged"; only rewritten points get real values */
    newOwners[i] = -1;
    newNumbers[i] = -1;
  }
  {
    PetscInt oldNumber, newNumber, oldOwner, newOwner;
    for (i=0; i<n; i++) {
      oldNumber = pointsToRewrite[i];
      newNumber = -1;
      oldOwner = rank;
      newOwner = targetOwners[i];
      if (oldOwner == newOwner) {
        newNumber = oldNumber;
      } else {
        /* Search the gathered leaf list of this root for the target rank to
           learn the point's local number on the new owner */
        for (j=0; j<degrees[oldNumber]; j++) {
          if (locationsOfLeafs[cumSumDegrees[oldNumber]+j] == newOwner) {
            newNumber = remoteLocalPointOfLeafs[cumSumDegrees[oldNumber]+j];
            break;
          }
        }
      }
      if (newNumber == -1) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Couldn't find the new owner of vertex.");

      newOwners[oldNumber] = newOwner;
      newNumbers[oldNumber] = newNumber;
    }
  }
  ierr = PetscFree(cumSumDegrees);CHKERRQ(ierr);
  ierr = PetscFree(locationsOfLeafs);CHKERRQ(ierr);
  ierr = PetscFree(remoteLocalPointOfLeafs);CHKERRQ(ierr);

  /* Broadcast the rewrite decisions from roots to all leaves so every process
     holding a copy of a point agrees on its new owner and number */
  ierr = PetscSFBcastBegin(sf, MPIU_INT, newOwners, newOwners);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf, MPIU_INT, newOwners, newOwners);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf, MPIU_INT, newNumbers, newNumbers);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf, MPIU_INT, newNumbers, newNumbers);CHKERRQ(ierr);

  /* Now count how many leafs we have on each processor. */
  leafCounter=0;
  for (i=0; i<pEnd-pStart; i++) {
    if (newOwners[i] >= 0) {
      /* Rewritten point: it is a leaf here iff the new owner is someone else */
      if (newOwners[i] != rank) {
        leafCounter++;
      }
    } else {
      /* Untouched point: keep its previous leaf status */
      if (isLeaf[i]) {
        leafCounter++;
      }
    }
  }

  /* Now set up the new sf by creating the leaf arrays */
  ierr = PetscMalloc1(leafCounter, &leafsNew);CHKERRQ(ierr);
  ierr = PetscMalloc1(leafCounter, &leafLocationsNew);CHKERRQ(ierr);

  leafCounter = 0;
  counter = 0;
  for (i=0; i<pEnd-pStart; i++) {
    if (newOwners[i] >= 0) {
      if (newOwners[i] != rank) {
        /* Point moved away: leaf now targets the new owner's local number */
        leafsNew[leafCounter] = i;
        leafLocationsNew[leafCounter].rank = newOwners[i];
        leafLocationsNew[leafCounter].index = newNumbers[i];
        leafCounter++;
      }
    } else {
      if (isLeaf[i]) {
        /* Untouched leaf: copy its old remote target; counter walks iremote
           in step with the original (sorted) leaf order */
        leafsNew[leafCounter] = i;
        leafLocationsNew[leafCounter].rank = iremote[counter].rank;
        leafLocationsNew[leafCounter].index = iremote[counter].index;
        leafCounter++;
      }
    }
    if (isLeaf[i]) {
      counter++;
    }
  }

  /* Replace the point SF graph in place; the SF takes ownership of both arrays */
  ierr = PetscSFSetGraph(sf, nroots, leafCounter, leafsNew, PETSC_OWN_POINTER, leafLocationsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscFree(newOwners);CHKERRQ(ierr);
  ierr = PetscFree(newNumbers);CHKERRQ(ierr);
  ierr = PetscFree(isLeaf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2818 
2819 static PetscErrorCode DMPlexViewDistribution(MPI_Comm comm, PetscInt n, PetscInt skip, PetscInt *vtxwgt, PetscInt *part, PetscViewer viewer)
2820 {
2821   PetscInt *distribution, min, max, sum, i, ierr;
2822   PetscMPIInt rank, size;
2823   PetscFunctionBegin;
2824   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2825   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2826   ierr = PetscCalloc1(size, &distribution);CHKERRQ(ierr);
2827   for (i=0; i<n; i++) {
2828     if (part) distribution[part[i]] += vtxwgt[skip*i];
2829     else distribution[rank] += vtxwgt[skip*i];
2830   }
2831   ierr = MPI_Allreduce(MPI_IN_PLACE, distribution, size, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
2832   min = distribution[0];
2833   max = distribution[0];
2834   sum = distribution[0];
2835   for (i=1; i<size; i++) {
2836     if (distribution[i]<min) min=distribution[i];
2837     if (distribution[i]>max) max=distribution[i];
2838     sum += distribution[i];
2839   }
2840   ierr = PetscViewerASCIIPrintf(viewer, "Min: %D, Avg: %D, Max: %D, Balance: %f\n", min, sum/size, max, (max*1.*size)/sum);CHKERRQ(ierr);
2841   ierr = PetscFree(distribution);CHKERRQ(ierr);
2842   PetscFunctionReturn(0);
2843 }
2844 
2845 #endif
2846 
2847 /*@
2848   DMPlexRebalanceSharedPoints - Redistribute points in the plex that are shared in order to achieve better balancing. This routine updates the PointSF of the DM inplace.
2849 
2850   Input parameters:
  + dm               - The DMPlex object.
  . entityDepth      - depth of the entity to balance (0 -> balance vertices).
  . useInitialGuess  - whether to use the current distribution as initial guess (only used by ParMETIS).
  - parallel         - whether to use ParMETIS and do the partition in parallel or whether to gather the graph onto a single process and use METIS.

  Output parameters:
  . success          - whether the graph partitioning was successful or not. If not, try useInitialGuess=True and parallel=True.
2858 
2859   Level: user
2860 
2861 @*/
2862 
2863 PetscErrorCode DMPlexRebalanceSharedPoints(DM dm, PetscInt entityDepth, PetscBool useInitialGuess, PetscBool parallel, PetscBool *success)
2864 {
2865 #if defined(PETSC_HAVE_PARMETIS)
2866   PetscSF     sf;
2867   PetscInt    ierr, i, j, idx, jdx;
2868   PetscInt    eBegin, eEnd, nroots, nleafs, pStart, pEnd;
2869   const       PetscInt *degrees, *ilocal;
2870   const       PetscSFNode *iremote;
2871   PetscBool   *toBalance, *isLeaf, *isExclusivelyOwned, *isNonExclusivelyOwned;
2872   PetscInt    numExclusivelyOwned, numNonExclusivelyOwned;
2873   PetscMPIInt rank, size;
2874   PetscInt    *globalNumbersOfLocalOwnedVertices, *leafGlobalNumbers;
2875   const       PetscInt *cumSumVertices;
2876   PetscInt    offset, counter;
2877   PetscInt    lenadjncy;
2878   PetscInt    *xadj, *adjncy, *vtxwgt;
2879   PetscInt    lenxadj;
2880   PetscInt    *adjwgt = NULL;
2881   PetscInt    *part, *options;
2882   PetscInt    nparts, wgtflag, numflag, ncon, edgecut;
2883   real_t      *ubvec;
2884   PetscInt    *firstVertices, *renumbering;
2885   PetscInt    failed, failedGlobal;
2886   MPI_Comm    comm;
2887   Mat         A, Apre;
2888   const char *prefix = NULL;
2889   PetscViewer       viewer;
2890   PetscViewerFormat format;
2891   PetscLayout layout;
2892 
2893   PetscFunctionBegin;
2894   if (success) *success = PETSC_FALSE;
2895   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2896   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2897   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2898   if (size==1) PetscFunctionReturn(0);
2899 
2900   ierr = PetscLogEventBegin(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
2901 
2902   ierr = PetscOptionsGetViewer(comm,((PetscObject)dm)->options, prefix,"-dm_rebalance_partition_view",&viewer,&format,NULL);CHKERRQ(ierr);
2903   if (viewer) {
2904     ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr);
2905   }
2906 
2907   /* Figure out all points in the plex that we are interested in balancing. */
2908   ierr = DMPlexGetDepthStratum(dm, entityDepth, &eBegin, &eEnd);CHKERRQ(ierr);
2909   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
2910   ierr = PetscMalloc1(pEnd-pStart, &toBalance);CHKERRQ(ierr);
2911 
2912   for (i=0; i<pEnd-pStart; i++) {
2913     toBalance[i] = (PetscBool)(i-pStart>=eBegin && i-pStart<eEnd);
2914   }
2915 
2916   /* There are three types of points:
2917    * exclusivelyOwned: points that are owned by this process and only seen by this process
2918    * nonExclusivelyOwned: points that are owned by this process but seen by at least another process
2919    * leaf: a point that is seen by this process but owned by a different process
2920    */
2921   ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
2922   ierr = PetscSFGetGraph(sf, &nroots, &nleafs, &ilocal, &iremote); CHKERRQ(ierr);
2923   ierr = PetscMalloc1(pEnd-pStart, &isLeaf);CHKERRQ(ierr);
2924   ierr = PetscMalloc1(pEnd-pStart, &isNonExclusivelyOwned);CHKERRQ(ierr);
2925   ierr = PetscMalloc1(pEnd-pStart, &isExclusivelyOwned);CHKERRQ(ierr);
2926   for (i=0; i<pEnd-pStart; i++) {
2927     isNonExclusivelyOwned[i] = PETSC_FALSE;
2928     isExclusivelyOwned[i] = PETSC_FALSE;
2929     isLeaf[i] = PETSC_FALSE;
2930   }
2931 
2932   /* start by marking all the leafs */
2933   for (i=0; i<nleafs; i++) {
2934     isLeaf[ilocal[i]-pStart] = PETSC_TRUE;
2935   }
2936 
2937   /* for an owned point, we can figure out whether another processor sees it or
2938    * not by calculating its degree */
2939   ierr = PetscSFComputeDegreeBegin(sf, &degrees);CHKERRQ(ierr);
2940   ierr = PetscSFComputeDegreeEnd(sf, &degrees);CHKERRQ(ierr);
2941 
2942   numExclusivelyOwned = 0;
2943   numNonExclusivelyOwned = 0;
2944   for (i=0; i<pEnd-pStart; i++) {
2945     if (toBalance[i]) {
2946       if (degrees[i] > 0) {
2947         isNonExclusivelyOwned[i] = PETSC_TRUE;
2948         numNonExclusivelyOwned += 1;
2949       } else {
2950         if (!isLeaf[i]) {
2951           isExclusivelyOwned[i] = PETSC_TRUE;
2952           numExclusivelyOwned += 1;
2953         }
2954       }
2955     }
2956   }
2957 
2958   /* We are going to build a graph with one vertex per core representing the
2959    * exclusively owned points and then one vertex per nonExclusively owned
2960    * point. */
2961 
2962   ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
2963   ierr = PetscLayoutSetLocalSize(layout, 1 + numNonExclusivelyOwned);CHKERRQ(ierr);
2964   ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
2965   ierr = PetscLayoutGetRanges(layout, &cumSumVertices);CHKERRQ(ierr);
2966 
2967   ierr = PetscMalloc1(pEnd-pStart, &globalNumbersOfLocalOwnedVertices);CHKERRQ(ierr);
2968   offset = cumSumVertices[rank];
2969   counter = 0;
2970   for (i=0; i<pEnd-pStart; i++) {
2971     if (toBalance[i]) {
2972       if (degrees[i] > 0) {
2973         globalNumbersOfLocalOwnedVertices[i] = counter + 1 + offset;
2974         counter++;
2975       }
2976     }
2977   }
2978 
2979   /* send the global numbers of vertices I own to the leafs so that they know to connect to it */
2980   ierr = PetscMalloc1(pEnd-pStart, &leafGlobalNumbers);CHKERRQ(ierr);
2981   ierr = PetscSFBcastBegin(sf, MPIU_INT, globalNumbersOfLocalOwnedVertices, leafGlobalNumbers);CHKERRQ(ierr);
2982   ierr = PetscSFBcastEnd(sf, MPIU_INT, globalNumbersOfLocalOwnedVertices, leafGlobalNumbers);CHKERRQ(ierr);
2983 
2984   /* Now start building the data structure for ParMETIS */
2985 
2986   ierr = MatCreate(comm, &Apre);CHKERRQ(ierr);
2987   ierr = MatSetType(Apre, MATPREALLOCATOR);CHKERRQ(ierr);
2988   ierr = MatSetSizes(Apre, 1+numNonExclusivelyOwned, 1+numNonExclusivelyOwned, cumSumVertices[size], cumSumVertices[size]);CHKERRQ(ierr);
2989   ierr = MatSetUp(Apre);CHKERRQ(ierr);
2990 
2991   for (i=0; i<pEnd-pStart; i++) {
2992     if (toBalance[i]) {
2993       idx = cumSumVertices[rank];
2994       if (isNonExclusivelyOwned[i]) jdx = globalNumbersOfLocalOwnedVertices[i];
2995       else if (isLeaf[i]) jdx = leafGlobalNumbers[i];
2996       else continue;
2997       ierr = MatSetValue(Apre, idx, jdx, 1, INSERT_VALUES);CHKERRQ(ierr);
2998       ierr = MatSetValue(Apre, jdx, idx, 1, INSERT_VALUES);CHKERRQ(ierr);
2999     }
3000   }
3001 
3002   ierr = MatAssemblyBegin(Apre, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3003   ierr = MatAssemblyEnd(Apre, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3004 
3005   ierr = MatCreate(comm, &A);CHKERRQ(ierr);
3006   ierr = MatSetType(A, MATMPIAIJ);CHKERRQ(ierr);
3007   ierr = MatSetSizes(A, 1+numNonExclusivelyOwned, 1+numNonExclusivelyOwned, cumSumVertices[size], cumSumVertices[size]);CHKERRQ(ierr);
3008   ierr = MatPreallocatorPreallocate(Apre, PETSC_FALSE, A);CHKERRQ(ierr);
3009   ierr = MatDestroy(&Apre);CHKERRQ(ierr);
3010 
3011   for (i=0; i<pEnd-pStart; i++) {
3012     if (toBalance[i]) {
3013       idx = cumSumVertices[rank];
3014       if (isNonExclusivelyOwned[i]) jdx = globalNumbersOfLocalOwnedVertices[i];
3015       else if (isLeaf[i]) jdx = leafGlobalNumbers[i];
3016       else continue;
3017       ierr = MatSetValue(A, idx, jdx, 1, INSERT_VALUES);CHKERRQ(ierr);
3018       ierr = MatSetValue(A, jdx, idx, 1, INSERT_VALUES);CHKERRQ(ierr);
3019     }
3020   }
3021 
3022   ierr = MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3023   ierr = MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3024   ierr = PetscFree(leafGlobalNumbers);CHKERRQ(ierr);
3025   ierr = PetscFree(globalNumbersOfLocalOwnedVertices);CHKERRQ(ierr);
3026 
3027   nparts = size;
3028   wgtflag = 2;
3029   numflag = 0;
3030   ncon = 2;
3031   real_t *tpwgts;
3032   ierr = PetscMalloc1(ncon * nparts, &tpwgts);CHKERRQ(ierr);
3033   for (i=0; i<ncon*nparts; i++) {
3034     tpwgts[i] = 1./(nparts);
3035   }
3036 
3037   ierr = PetscMalloc1(ncon, &ubvec);CHKERRQ(ierr);
3038   ubvec[0] = 1.01;
3039   ubvec[1] = 1.01;
3040   lenadjncy = 0;
3041   for (i=0; i<1+numNonExclusivelyOwned; i++) {
3042     PetscInt temp=0;
3043     ierr = MatGetRow(A, cumSumVertices[rank] + i, &temp, NULL, NULL);CHKERRQ(ierr);
3044     lenadjncy += temp;
3045     ierr = MatRestoreRow(A, cumSumVertices[rank] + i, &temp, NULL, NULL);CHKERRQ(ierr);
3046   }
3047   ierr = PetscMalloc1(lenadjncy, &adjncy);CHKERRQ(ierr);
3048   lenxadj = 2 + numNonExclusivelyOwned;
3049   ierr = PetscMalloc1(lenxadj, &xadj);CHKERRQ(ierr);
3050   xadj[0] = 0;
3051   counter = 0;
3052   for (i=0; i<1+numNonExclusivelyOwned; i++) {
3053     PetscInt        temp=0;
3054     const PetscInt *cols;
3055     ierr = MatGetRow(A, cumSumVertices[rank] + i, &temp, &cols, NULL);CHKERRQ(ierr);
3056     ierr = PetscMemcpy(&adjncy[counter], cols, temp*sizeof(PetscInt));CHKERRQ(ierr);
3057     counter += temp;
3058     xadj[i+1] = counter;
3059     ierr = MatRestoreRow(A, cumSumVertices[rank] + i, &temp, &cols, NULL);CHKERRQ(ierr);
3060   }
3061 
3062   ierr = PetscMalloc1(cumSumVertices[rank+1]-cumSumVertices[rank], &part);CHKERRQ(ierr);
3063   ierr = PetscMalloc1(ncon*(1 + numNonExclusivelyOwned), &vtxwgt);CHKERRQ(ierr);
3064   vtxwgt[0] = numExclusivelyOwned;
3065   if (ncon>1) vtxwgt[1] = 1;
3066   for (i=0; i<numNonExclusivelyOwned; i++) {
3067     vtxwgt[ncon*(i+1)] = 1;
3068     if (ncon>1) vtxwgt[ncon*(i+1)+1] = 0;
3069   }
3070 
3071   if (viewer) {
3072     ierr = PetscViewerASCIIPrintf(viewer, "Attempt rebalancing of shared points of depth %D on interface of mesh distribution.\n", entityDepth);CHKERRQ(ierr);
3073     ierr = PetscViewerASCIIPrintf(viewer, "Size of generated auxiliary graph: %D\n", cumSumVertices[size]);CHKERRQ(ierr);
3074   }
3075   if (parallel) {
3076     ierr = PetscMalloc1(4, &options);CHKERRQ(ierr);
3077     options[0] = 1;
3078     options[1] = 0; /* Verbosity */
3079     options[2] = 0; /* Seed */
3080     options[3] = PARMETIS_PSR_COUPLED; /* Seed */
3081     if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using ParMETIS to partition graph.\n");CHKERRQ(ierr); }
3082     if (useInitialGuess) {
3083       if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using current distribution of points as initial guess.\n");CHKERRQ(ierr); }
3084       PetscStackPush("ParMETIS_V3_RefineKway");
3085       ierr = ParMETIS_V3_RefineKway((PetscInt*)cumSumVertices, xadj, adjncy, vtxwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgecut, part, &comm);
3086       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_RefineKway()");
3087       PetscStackPop;
3088     } else {
3089       PetscStackPush("ParMETIS_V3_PartKway");
3090       ierr = ParMETIS_V3_PartKway((PetscInt*)cumSumVertices, xadj, adjncy, vtxwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgecut, part, &comm);
3091       PetscStackPop;
3092       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
3093     }
3094     ierr = PetscFree(options);CHKERRQ(ierr);
3095   } else {
3096     if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using METIS to partition graph.\n");CHKERRQ(ierr); }
3097     Mat As;
3098     PetscInt numRows;
3099     PetscInt *partGlobal;
3100     ierr = MatCreateRedundantMatrix(A, size, MPI_COMM_NULL, MAT_INITIAL_MATRIX, &As);CHKERRQ(ierr);
3101 
3102     PetscInt *numExclusivelyOwnedAll;
3103     ierr = PetscMalloc1(size, &numExclusivelyOwnedAll);CHKERRQ(ierr);
3104     numExclusivelyOwnedAll[rank] = numExclusivelyOwned;
3105     ierr = MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,numExclusivelyOwnedAll,1,MPIU_INT,comm);CHKERRQ(ierr);
3106 
3107     ierr = MatGetSize(As, &numRows, NULL);CHKERRQ(ierr);
3108     ierr = PetscMalloc1(numRows, &partGlobal);CHKERRQ(ierr);
3109     if (!rank) {
3110       PetscInt *adjncy_g, *xadj_g, *vtxwgt_g;
3111       lenadjncy = 0;
3112 
3113       for (i=0; i<numRows; i++) {
3114         PetscInt temp=0;
3115         ierr = MatGetRow(As, i, &temp, NULL, NULL);CHKERRQ(ierr);
3116         lenadjncy += temp;
3117         ierr = MatRestoreRow(As, i, &temp, NULL, NULL);CHKERRQ(ierr);
3118       }
3119       ierr = PetscMalloc1(lenadjncy, &adjncy_g);CHKERRQ(ierr);
3120       lenxadj = 1 + numRows;
3121       ierr = PetscMalloc1(lenxadj, &xadj_g);CHKERRQ(ierr);
3122       xadj_g[0] = 0;
3123       counter = 0;
3124       for (i=0; i<numRows; i++) {
3125         PetscInt        temp=0;
3126         const PetscInt *cols;
3127         ierr = MatGetRow(As, i, &temp, &cols, NULL);CHKERRQ(ierr);
3128         ierr = PetscMemcpy(&adjncy_g[counter], cols, temp*sizeof(PetscInt));CHKERRQ(ierr);
3129         counter += temp;
3130         xadj_g[i+1] = counter;
3131         ierr = MatRestoreRow(As, i, &temp, &cols, NULL);CHKERRQ(ierr);
3132       }
3133       ierr = PetscMalloc1(2*numRows, &vtxwgt_g);CHKERRQ(ierr);
3134       for (i=0; i<size; i++){
3135         vtxwgt_g[ncon*cumSumVertices[i]] = numExclusivelyOwnedAll[i];
3136         if (ncon>1) vtxwgt_g[ncon*cumSumVertices[i]+1] = 1;
3137         for (j=cumSumVertices[i]+1; j<cumSumVertices[i+1]; j++) {
3138           vtxwgt_g[ncon*j] = 1;
3139           if (ncon>1) vtxwgt_g[2*j+1] = 0;
3140         }
3141       }
3142       ierr = PetscMalloc1(64, &options);CHKERRQ(ierr);
3143       ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
3144       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
3145       options[METIS_OPTION_CONTIG] = 1;
3146       PetscStackPush("METIS_PartGraphKway");
3147       ierr = METIS_PartGraphKway(&numRows, &ncon, xadj_g, adjncy_g, vtxwgt_g, NULL, NULL, &nparts, tpwgts, ubvec, options, &edgecut, partGlobal);
3148       PetscStackPop;
3149       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
3150       ierr = PetscFree(options);CHKERRQ(ierr);
3151       ierr = PetscFree(xadj_g);CHKERRQ(ierr);
3152       ierr = PetscFree(adjncy_g);CHKERRQ(ierr);
3153       ierr = PetscFree(vtxwgt_g);CHKERRQ(ierr);
3154     }
3155     ierr = PetscFree(numExclusivelyOwnedAll);CHKERRQ(ierr);
3156 
3157     /* Now scatter the parts array. */
3158     {
3159       PetscMPIInt *counts, *mpiCumSumVertices;
3160       ierr = PetscMalloc1(size, &counts);CHKERRQ(ierr);
3161       ierr = PetscMalloc1(size+1, &mpiCumSumVertices);CHKERRQ(ierr);
3162       for(i=0; i<size; i++) {
3163         ierr = PetscMPIIntCast(cumSumVertices[i+1] - cumSumVertices[i], &(counts[i]));CHKERRQ(ierr);
3164       }
3165       for(i=0; i<=size; i++) {
3166         ierr = PetscMPIIntCast(cumSumVertices[i], &(mpiCumSumVertices[i]));CHKERRQ(ierr);
3167       }
3168       ierr = MPI_Scatterv(partGlobal, counts, mpiCumSumVertices, MPIU_INT, part, counts[rank], MPIU_INT, 0, comm);CHKERRQ(ierr);
3169       ierr = PetscFree(counts);CHKERRQ(ierr);
3170       ierr = PetscFree(mpiCumSumVertices);CHKERRQ(ierr);
3171     }
3172 
3173     ierr = PetscFree(partGlobal);CHKERRQ(ierr);
3174     ierr = MatDestroy(&As);CHKERRQ(ierr);
3175   }
3176 
3177   ierr = MatDestroy(&A);CHKERRQ(ierr);
3178   ierr = PetscFree(ubvec);CHKERRQ(ierr);
3179   ierr = PetscFree(tpwgts);CHKERRQ(ierr);
3180 
3181   /* Now rename the result so that the vertex resembling the exclusively owned points stays on the same rank */
3182 
3183   ierr = PetscMalloc1(size, &firstVertices);CHKERRQ(ierr);
3184   ierr = PetscMalloc1(size, &renumbering);CHKERRQ(ierr);
3185   firstVertices[rank] = part[0];
3186   ierr = MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,firstVertices,1,MPIU_INT,comm);CHKERRQ(ierr);
3187   for (i=0; i<size; i++) {
3188     renumbering[firstVertices[i]] = i;
3189   }
3190   for (i=0; i<cumSumVertices[rank+1]-cumSumVertices[rank]; i++) {
3191     part[i] = renumbering[part[i]];
3192   }
3193   /* Check if the renumbering worked (this can fail when ParMETIS gives fewer partitions than there are processes) */
3194   failed = (PetscInt)(part[0] != rank);
3195   ierr = MPI_Allreduce(&failed, &failedGlobal, 1, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
3196 
3197   ierr = PetscFree(firstVertices);CHKERRQ(ierr);
3198   ierr = PetscFree(renumbering);CHKERRQ(ierr);
3199 
3200   if (failedGlobal > 0) {
3201     ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
3202     ierr = PetscFree(xadj);CHKERRQ(ierr);
3203     ierr = PetscFree(adjncy);CHKERRQ(ierr);
3204     ierr = PetscFree(vtxwgt);CHKERRQ(ierr);
3205     ierr = PetscFree(toBalance);CHKERRQ(ierr);
3206     ierr = PetscFree(isLeaf);CHKERRQ(ierr);
3207     ierr = PetscFree(isNonExclusivelyOwned);CHKERRQ(ierr);
3208     ierr = PetscFree(isExclusivelyOwned);CHKERRQ(ierr);
3209     ierr = PetscFree(part);CHKERRQ(ierr);
3210     if (viewer) {
3211       ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
3212       ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
3213     }
3214     ierr = PetscLogEventEnd(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
3215     PetscFunctionReturn(0);
3216   }
3217 
3218   /*Let's check how well we did distributing points*/
3219   if (viewer) {
3220     ierr = PetscViewerASCIIPrintf(viewer, "Comparing number of owned entities of depth %D on each process before rebalancing, after rebalancing, and after consistency checks.\n", entityDepth);CHKERRQ(ierr);
3221     ierr = PetscViewerASCIIPrintf(viewer, "Initial.     ");CHKERRQ(ierr);
3222     ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, NULL, viewer);CHKERRQ(ierr);
3223     ierr = PetscViewerASCIIPrintf(viewer, "Rebalanced.  ");CHKERRQ(ierr);
3224     ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, part, viewer);CHKERRQ(ierr);
3225   }
3226 
3227   /* Now check that every vertex is owned by a process that it is actually connected to. */
3228   for (i=1; i<=numNonExclusivelyOwned; i++) {
3229     PetscInt loc = 0;
3230     ierr = PetscFindInt(cumSumVertices[part[i]], xadj[i+1]-xadj[i], &adjncy[xadj[i]], &loc);CHKERRQ(ierr);
3231     /* If not, then just set the owner to the original owner (hopefully a rare event, it means that a vertex has been isolated) */
3232     if (loc<0) {
3233       part[i] = rank;
3234     }
3235   }
3236 
3237   /* Let's see how significant the influences of the previous fixing up step was.*/
3238   if (viewer) {
3239     ierr = PetscViewerASCIIPrintf(viewer, "After.       ");CHKERRQ(ierr);
3240     ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, part, viewer);CHKERRQ(ierr);
3241   }
3242 
3243   ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
3244   ierr = PetscFree(xadj);CHKERRQ(ierr);
3245   ierr = PetscFree(adjncy);CHKERRQ(ierr);
3246   ierr = PetscFree(vtxwgt);CHKERRQ(ierr);
3247 
3248   /* Almost done, now rewrite the SF to reflect the new ownership. */
3249   {
3250     PetscInt *pointsToRewrite;
3251     ierr = PetscMalloc1(numNonExclusivelyOwned, &pointsToRewrite);CHKERRQ(ierr);
3252     counter = 0;
3253     for(i=0; i<pEnd-pStart; i++) {
3254       if (toBalance[i]) {
3255         if (isNonExclusivelyOwned[i]) {
3256           pointsToRewrite[counter] = i + pStart;
3257           counter++;
3258         }
3259       }
3260     }
3261     ierr = DMPlexRewriteSF(dm, numNonExclusivelyOwned, pointsToRewrite, part+1, degrees);CHKERRQ(ierr);
3262     ierr = PetscFree(pointsToRewrite);CHKERRQ(ierr);
3263   }
3264 
3265   ierr = PetscFree(toBalance);CHKERRQ(ierr);
3266   ierr = PetscFree(isLeaf);CHKERRQ(ierr);
3267   ierr = PetscFree(isNonExclusivelyOwned);CHKERRQ(ierr);
3268   ierr = PetscFree(isExclusivelyOwned);CHKERRQ(ierr);
3269   ierr = PetscFree(part);CHKERRQ(ierr);
3270   if (viewer) {
3271     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
3272     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
3273   }
3274   if (success) *success = PETSC_TRUE;
3275   ierr = PetscLogEventEnd(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
3276 #else
3277   SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
3278 #endif
3279   PetscFunctionReturn(0);
3280 }
3281