xref: /petsc/src/dm/impls/plex/plexpartition.c (revision bbbc8e51870e2779407bb0f2e18f2ea4346c4bc5)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/hashseti.h>
3 
/* Class id for PetscPartitioner objects; assigned during class registration */
PetscClassId PETSCPARTITIONER_CLASSID = 0;

/* Registry of PetscPartitioner implementations (populated by PetscPartitionerRegister())
   and the flag recording whether the built-in implementations have been registered */
PetscFunctionList PetscPartitionerList              = NULL;
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
8 
9 PetscBool ChacoPartitionercite = PETSC_FALSE;
10 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
11                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
12                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
13                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
14                                "  isbn      = {0-89791-816-9},\n"
15                                "  pages     = {28},\n"
16                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
17                                "  publisher = {ACM Press},\n"
18                                "  address   = {New York},\n"
19                                "  year      = {1995}\n}\n";
20 
/* BibTeX citation for the ParMetis partitioner; the flag records whether it
   has already been emitted */
PetscBool ParMetisPartitionercite = PETSC_FALSE;
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";
29 
30 PETSC_STATIC_INLINE PetscInt DMPlex_GlobalID(PetscInt point) { return point >= 0 ? point : -(point+1); }
31 
/* Build the cell-cell adjacency graph in CSR form for the partitioner by
   direct mesh traversal.

   Graph vertices are the locally owned points of the given height (cells for
   height 0); edges connect cells that are adjacent under FVM (face-neighbor)
   adjacency.  Neighbors owned by other ranks are recovered by exchanging
   global cell ids across the point SF.

   Outputs (each may be NULL except as used below):
     numVertices     - number of owned graph vertices on this rank
     offsets         - CSR row offsets, length *numVertices+1; caller frees with PetscFree()
     adjacency       - CSR column indices in the global cell numbering; caller frees with PetscFree()
     globalNumbering - IS mapping local cells to the global numbering; negative
                       entries mark cells owned by another rank; caller destroys */
static PetscErrorCode DMPlexCreatePartitionerGraph_Native(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
{
  PetscInt       dim, depth, p, pStart, pEnd, a, adjSize, idx, size;
  PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
  IS             cellNumbering;
  const PetscInt *cellNum;
  PetscBool      useCone, useClosure;
  PetscSection   section;
  PetscSegBuffer adjBuffer;
  PetscSF        sfPoint;
  PetscInt       *adjCells = NULL, *remoteCells = NULL;
  const PetscInt *local;
  PetscInt       nroots, nleaves, l;
  PetscMPIInt    rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  if (dim != depth) {
    /* We do not handle the uninterpolated case here */
    ierr = DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);CHKERRQ(ierr);
    /* DMPlexCreateNeighborCSR does not make a numbering */
    if (globalNumbering) {ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);CHKERRQ(ierr);}
    /* Different behavior for empty graphs */
    if (!*numVertices) {
      ierr = PetscMalloc1(1, offsets);CHKERRQ(ierr);
      (*offsets)[0] = 0;
    }
    /* Broken in parallel */
    if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported");
    PetscFunctionReturn(0);
  }
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
  /* Build adjacency graph via a section/segbuffer */
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
  /* Always use FVM adjacency to create partitioner graph; the user's setting
     is saved here and restored after the traversal below */
  ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
  ierr = DMSetBasicAdjacency(dm, PETSC_TRUE, PETSC_FALSE);CHKERRQ(ierr);
  ierr = DMPlexCreateNumbering_Internal(dm, pStart, pEnd, 0, NULL, sfPoint, &cellNumbering);CHKERRQ(ierr);
  if (globalNumbering) {
    /* Hand the caller a reference to the same numbering used internally */
    ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
    *globalNumbering = cellNumbering;
  }
  ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  /* For all boundary faces (including faces adjacent to a ghost cell), record the local cell in adjCells
     Broadcast adjCells to remoteCells (to get cells from roots) and Reduce adjCells to remoteCells (to get cells from leaves)
   */
  ierr = PetscSFGetGraph(sfPoint, &nroots, &nleaves, &local, NULL);CHKERRQ(ierr);
  if (nroots >= 0) {
    PetscInt fStart, fEnd, f;

    ierr = PetscCalloc2(nroots, &adjCells, nroots, &remoteCells);CHKERRQ(ierr);
    ierr = DMPlexGetHeightStratum(dm, height+1, &fStart, &fEnd);CHKERRQ(ierr);
    /* Negative sentinel: no adjacent cell recorded for this point yet */
    for (l = 0; l < nroots; ++l) adjCells[l] = -3;
    for (f = fStart; f < fEnd; ++f) {
      const PetscInt *support;
      PetscInt        supportSize;

      ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
      ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
      /* A face with a single supporting cell lies on the (local) boundary;
         record that cell's global id on the face */
      if (supportSize == 1) adjCells[f] = DMPlex_GlobalID(cellNum[support[0]]);
      else if (supportSize == 2) {
        /* For an interior face whose neighbor is a leaf (owned elsewhere),
           record the OTHER cell so the owner learns its remote neighbor */
        ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = DMPlex_GlobalID(cellNum[support[1]]);
        ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = DMPlex_GlobalID(cellNum[support[0]]);
      }
      /* Handle non-conforming meshes */
      if (supportSize > 2) {
        PetscInt        numChildren, i;
        const PetscInt *children;

        ierr = DMPlexGetTreeChildren(dm, f, &numChildren, &children);CHKERRQ(ierr);
        for (i = 0; i < numChildren; ++i) {
          const PetscInt child = children[i];
          if (fStart <= child && child < fEnd) {
            ierr = DMPlexGetSupport(dm, child, &support);CHKERRQ(ierr);
            ierr = DMPlexGetSupportSize(dm, child, &supportSize);CHKERRQ(ierr);
            if (supportSize == 1) adjCells[child] = DMPlex_GlobalID(cellNum[support[0]]);
            else if (supportSize == 2) {
              ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr);
              if (p >= 0) adjCells[child] = DMPlex_GlobalID(cellNum[support[1]]);
              ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr);
              if (p >= 0) adjCells[child] = DMPlex_GlobalID(cellNum[support[0]]);
            }
          }
        }
      }
    }
    for (l = 0; l < nroots; ++l) remoteCells[l] = -1;
    /* Bcast pushes root values to leaves; the MPI_MAX reduce then pulls leaf
       values back to roots, so both sides end up with the valid (>= 0) id */
    ierr = PetscSFBcastBegin(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
  }
  /* Combine local and global adjacencies */
  for (*numVertices = 0, p = pStart; p < pEnd; p++) {
    /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    /* Add remote cells */
    if (remoteCells) {
      const PetscInt gp = DMPlex_GlobalID(cellNum[p]);
      PetscInt       coneSize, numChildren, c, i;
      const PetscInt *cone, *children;

      ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
      ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
      for (c = 0; c < coneSize; ++c) {
        const PetscInt point = cone[c];
        /* A valid remote id that is not this cell itself is an off-rank edge */
        if (remoteCells[point] >= 0 && remoteCells[point] != gp) {
          PetscInt *PETSC_RESTRICT pBuf;
          ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
          ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
          *pBuf = remoteCells[point];
        }
        /* Handle non-conforming meshes */
        ierr = DMPlexGetTreeChildren(dm, point, &numChildren, &children);CHKERRQ(ierr);
        for (i = 0; i < numChildren; ++i) {
          const PetscInt child = children[i];
          if (remoteCells[child] >= 0 && remoteCells[child] != gp) {
            PetscInt *PETSC_RESTRICT pBuf;
            ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
            ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
            *pBuf = remoteCells[child];
          }
        }
      }
    }
    /* Add local cells */
    adjSize = PETSC_DETERMINE;
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      const PetscInt point = adj[a];
      if (point != p && pStart <= point && point < pEnd) {
        PetscInt *PETSC_RESTRICT pBuf;
        ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
        ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
        *pBuf = DMPlex_GlobalID(cellNum[point]);
      }
    }
    (*numVertices)++;
  }
  ierr = PetscFree(adj);CHKERRQ(ierr);
  ierr = PetscFree2(adjCells, remoteCells);CHKERRQ(ierr);
  /* Restore the user's adjacency setting saved above */
  ierr = DMSetBasicAdjacency(dm, useCone, useClosure);CHKERRQ(ierr);

  /* Derive CSR graph from section/segbuffer */
  ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
  for (idx = 0, p = pStart; p < pEnd; p++) {
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
  }
  vOffsets[*numVertices] = size;
  ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);

  if (nroots >= 0) {
    /* Filter out duplicate edges using section/segbuffer: an edge may have
       been recorded both as a local and as a remote adjacency above */
    ierr = PetscSectionReset(section);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(section, 0, *numVertices);CHKERRQ(ierr);
    for (p = 0; p < *numVertices; p++) {
      PetscInt start = vOffsets[p], end = vOffsets[p+1];
      PetscInt numEdges = end-start, *PETSC_RESTRICT edges;
      ierr = PetscSortRemoveDupsInt(&numEdges, &graph[start]);CHKERRQ(ierr);
      ierr = PetscSectionSetDof(section, p, numEdges);CHKERRQ(ierr);
      ierr = PetscSegBufferGetInts(adjBuffer, numEdges, &edges);CHKERRQ(ierr);
      ierr = PetscMemcpy(edges, &graph[start], numEdges*sizeof(*edges));CHKERRQ(ierr);
    }
    ierr = PetscFree(vOffsets);CHKERRQ(ierr);
    ierr = PetscFree(graph);CHKERRQ(ierr);
    /* Derive CSR graph from section/segbuffer (second pass, now deduplicated) */
    ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
    ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
    ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
    for (idx = 0, p = 0; p < *numVertices; p++) {
      ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
    }
    vOffsets[*numVertices] = size;
    ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
  } else {
    /* Sort adjacencies (not strictly necessary) */
    for (p = 0; p < *numVertices; p++) {
      PetscInt start = vOffsets[p], end = vOffsets[p+1];
      ierr = PetscSortInt(end-start, &graph[start]);CHKERRQ(ierr);
    }
  }

  if (offsets) *offsets = vOffsets;
  if (adjacency) *adjacency = graph;

  /* Cleanup */
  ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
  ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
235 
/* Build the same cell-cell CSR graph as the native routine, but by assembling
   a parallel face-cell incidence matrix "conn" and computing conn^T * conn:
   two cells are connected iff they share (at least) a face.  The diagonal
   (self-connectivity) is stripped from the result before returning, since
   ParMetis rejects self edges.  Output arrays follow the same ownership rules
   as the native routine: offsets/adjacency freed with PetscFree(), the
   globalNumbering IS destroyed by the caller. */
static PetscErrorCode DMPlexCreatePartitionerGraph_ViaMat(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
{
  Mat            conn, CSR;
  IS             fis, cis, cis_own;
  PetscSF        sfPoint;
  const char     *prefix;
  const PetscInt *rows, *cols, *ii, *jj;
  PetscInt       *idxs,*idxs2;
  PetscInt       dim, depth, floc, cloc, i, M, N, c, m, cStart, cEnd, fStart, fEnd;
  PetscMPIInt    rank;
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  if (dim != depth) {
    /* We do not handle the uninterpolated case here */
    ierr = DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);CHKERRQ(ierr);
    /* DMPlexCreateNeighborCSR does not make a numbering */
    if (globalNumbering) {ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);CHKERRQ(ierr);}
    /* Different behavior for empty graphs */
    if (!*numVertices) {
      ierr = PetscMalloc1(1, offsets);CHKERRQ(ierr);
      (*offsets)[0] = 0;
    }
    /* Broken in parallel */
    if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported");
    PetscFunctionReturn(0);
  }
  /* Interpolated and parallel case */
  ierr = DMGetOptionsPrefix(dm, &prefix);CHKERRQ(ierr);

  /* numbering: global numberings for cells (columns) and faces (rows);
     negative entries mark points owned by another rank */
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &cStart, &cEnd);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height+1, &fStart, &fEnd);CHKERRQ(ierr);
  ierr = DMPlexCreateNumbering_Internal(dm, cStart, cEnd, 0, &N, sfPoint, &cis);CHKERRQ(ierr);
  ierr = DMPlexCreateNumbering_Internal(dm, fStart, fEnd, 0, &M, sfPoint, &fis);CHKERRQ(ierr);
  if (globalNumbering) {
    ierr = ISDuplicate(cis, globalNumbering);CHKERRQ(ierr);
  }

  /* get positive global ids and local sizes for facets and cells:
     decode -(p+1) back to p, and count owned (non-negative) entries */
  ierr = ISGetLocalSize(fis, &m);CHKERRQ(ierr);
  ierr = ISGetIndices(fis, &rows);CHKERRQ(ierr);
  ierr = PetscMalloc1(m, &idxs);CHKERRQ(ierr);
  for (i = 0, floc = 0; i < m; i++) {
    const PetscInt p = rows[i];

    if (p < 0) {
      idxs[i] = -(p+1);
    } else {
      idxs[i] = p;
      floc   += 1;
    }
  }
  ierr = ISRestoreIndices(fis, &rows);CHKERRQ(ierr);
  ierr = ISDestroy(&fis);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF, m, idxs, PETSC_OWN_POINTER, &fis);CHKERRQ(ierr);

  ierr = ISGetLocalSize(cis, &m);CHKERRQ(ierr);
  ierr = ISGetIndices(cis, &cols);CHKERRQ(ierr);
  ierr = PetscMalloc1(m, &idxs);CHKERRQ(ierr);
  ierr = PetscMalloc1(m, &idxs2);CHKERRQ(ierr);
  for (i = 0, cloc = 0; i < m; i++) {
    const PetscInt p = cols[i];

    if (p < 0) {
      idxs[i] = -(p+1);
    } else {
      idxs[i]       = p;
      idxs2[cloc++] = p;   /* global ids of owned cells only */
    }
  }
  ierr = ISRestoreIndices(cis, &cols);CHKERRQ(ierr);
  ierr = ISDestroy(&cis);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), m, idxs, PETSC_OWN_POINTER, &cis);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), cloc, idxs2, PETSC_OWN_POINTER, &cis_own);CHKERRQ(ierr);

  ierr = PetscObjectSetName((PetscObject)cis, "Local cells");CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject)cis_own, "Owned local cells");CHKERRQ(ierr);
  ierr = PetscObjectSetOptionsPrefix((PetscObject)cis, prefix);CHKERRQ(ierr);
  ierr = PetscObjectSetOptionsPrefix((PetscObject)cis_own, prefix);CHKERRQ(ierr);
  ierr = ISViewFromOptions(cis, NULL, "-dm_plex_csr_is_view");CHKERRQ(ierr);
  ierr = ISViewFromOptions(cis_own, NULL, "-dm_plex_csr_is_view");CHKERRQ(ierr);

  /* Create matrix to hold F-C connectivity (MatMatTranspose Mult not supported for MPIAIJ) */
  ierr = MatCreate(PetscObjectComm((PetscObject)dm), &conn);CHKERRQ(ierr);
  ierr = MatSetSizes(conn, floc, cloc, M, N);CHKERRQ(ierr);
  ierr = MatSetType(conn, MATMPIAIJ);CHKERRQ(ierr);
  ierr = DMPlexGetMaxSizes(dm, NULL, &m);CHKERRQ(ierr);
  /* Preallocate using the maximum support size for both diagonal and
     off-diagonal parts */
  ierr = MatMPIAIJSetPreallocation(conn, m, NULL, m, NULL);CHKERRQ(ierr);

  /* Assemble matrix: entry (f, c) = 1 for each face f in the cone of cell c */
  ierr = ISGetIndices(fis, &rows);CHKERRQ(ierr);
  ierr = ISGetIndices(cis, &cols);CHKERRQ(ierr);
  for (c = cStart; c < cEnd; c++) {
    const PetscInt *cone;
    PetscInt        coneSize, row, col, f;

    col  = cols[c-cStart];
    ierr = DMPlexGetCone(dm, c, &cone);CHKERRQ(ierr);
    ierr = DMPlexGetConeSize(dm, c, &coneSize);CHKERRQ(ierr);
    for (f = 0; f < coneSize; f++) {
      const PetscScalar v = 1.0;
      const PetscInt *children;
      PetscInt        numChildren, ch;

      row  = rows[cone[f]-fStart];
      ierr = MatSetValues(conn, 1, &row, 1, &col, &v, INSERT_VALUES);CHKERRQ(ierr);

      /* non-conforming meshes: a child face also connects to this cell */
      ierr = DMPlexGetTreeChildren(dm, cone[f], &numChildren, &children);CHKERRQ(ierr);
      for (ch = 0; ch < numChildren; ch++) {
        const PetscInt child = children[ch];

        if (child < fStart || child >= fEnd) continue;
        row  = rows[child-fStart];
        ierr = MatSetValues(conn, 1, &row, 1, &col, &v, INSERT_VALUES);CHKERRQ(ierr);
      }
    }
  }
  ierr = ISRestoreIndices(fis, &rows);CHKERRQ(ierr);
  ierr = ISRestoreIndices(cis, &cols);CHKERRQ(ierr);
  ierr = ISDestroy(&fis);CHKERRQ(ierr);
  ierr = ISDestroy(&cis);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(conn, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(conn, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  ierr = PetscObjectSetName((PetscObject)conn, "F-C connectivity");CHKERRQ(ierr);
  ierr = MatSetOptionsPrefix(conn, prefix);CHKERRQ(ierr);
  ierr = MatViewFromOptions(conn, NULL, "-dm_plex_csr_conn_view");CHKERRQ(ierr);

  /* Get parallel CSR by doing conn^T * conn */
  ierr = MatTransposeMatMult(conn, conn, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &CSR);CHKERRQ(ierr);
  ierr = MatDestroy(&conn);CHKERRQ(ierr);

  ierr = PetscObjectSetName((PetscObject)CSR, "Connectivity");CHKERRQ(ierr);
  ierr = MatSetOptionsPrefix(CSR, prefix);CHKERRQ(ierr);
  ierr = DMViewFromOptions(dm, NULL, "-dm_plex_csr_dm_view");CHKERRQ(ierr);
  ierr = MatViewFromOptions(CSR, NULL, "-dm_plex_csr_mat_view");CHKERRQ(ierr);

  /* extract local part of the CSR */
  ierr = MatMPIAIJGetLocalMat(CSR, MAT_INITIAL_MATRIX, &conn);CHKERRQ(ierr);
  ierr = MatDestroy(&CSR);CHKERRQ(ierr);
  ierr = MatGetRowIJ(conn, 0, PETSC_FALSE, PETSC_FALSE, &m, &ii, &jj, &flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "No IJ format");

  /* get back requested output */
  if (numVertices) *numVertices = m;
  if (offsets) {
    ierr = PetscCalloc1(m+1, &idxs);CHKERRQ(ierr);
    /* subtract i to drop the one diagonal entry per row;
       the count check below verifies this assumption */
    for (i = 1; i < m+1; i++) idxs[i] = ii[i] - i; /* ParMetis does not like self-connectivity */
    *offsets = idxs;
  }
  if (adjacency) {
    ierr = PetscMalloc1(ii[m] - m, &idxs);CHKERRQ(ierr);
    ierr = ISGetIndices(cis_own, &rows);CHKERRQ(ierr);
    for (i = 0, c = 0; i < m; i++) {
      PetscInt j, g = rows[i];

      for (j = ii[i]; j < ii[i+1]; j++) {
        if (jj[j] == g) continue; /* again, self-connectivity */
        idxs[c++] = jj[j];
      }
    }
    if (c != ii[m] - m) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected %D != %D",c,ii[m]-m);
    ierr = ISRestoreIndices(cis_own, &rows);CHKERRQ(ierr);
    *adjacency = idxs;
  }

  /* cleanup */
  ierr = ISDestroy(&cis_own);CHKERRQ(ierr);
  ierr = MatRestoreRowIJ(conn, 0, PETSC_FALSE, PETSC_FALSE, &m, &ii, &jj, &flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "No IJ format");
  ierr = MatDestroy(&conn);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
416 
417 /*@C
418   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
419 
420   Input Parameters:
421 + dm      - The mesh DM dm
422 - height  - Height of the strata from which to construct the graph
423 
  Output Parameters:
+ numVertices     - Number of vertices in the graph
. offsets         - Point offsets in the graph
. adjacency       - Point connectivity in the graph
- globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.

  Notes:
  The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
  representation on the mesh. If requested, globalNumbering needs to be destroyed by the caller; offsets and adjacency need to be freed with PetscFree().
432 
433   Level: developer
434 
435 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMSetAdjacency()
436 @*/
437 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
438 {
439   PetscErrorCode ierr;
440   PetscBool      usemat = PETSC_FALSE;
441 
442   PetscFunctionBegin;
443   ierr = PetscOptionsGetBool(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-dm_plex_csr_via_mat", &usemat, NULL);CHKERRQ(ierr);
444   if (usemat) {
445     ierr = DMPlexCreatePartitionerGraph_ViaMat(dm, height, numVertices, offsets, adjacency, globalNumbering);CHKERRQ(ierr);
446   } else {
447     ierr = DMPlexCreatePartitionerGraph_Native(dm, height, numVertices, offsets, adjacency, globalNumbering);CHKERRQ(ierr);
448   }
449   PetscFunctionReturn(0);
450 }
451 
452 /*@C
453   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
454 
455   Collective
456 
457   Input Arguments:
458 + dm - The DMPlex
459 - cellHeight - The height of mesh points to treat as cells (default should be 0)
460 
461   Output Arguments:
462 + numVertices - The number of local vertices in the graph, or cells in the mesh.
463 . offsets     - The offset to the adjacency list for each cell
464 - adjacency   - The adjacency list for all cells
465 
466   Note: This is suitable for input to a mesh partitioner like ParMetis.
467 
468   Level: advanced
469 
470 .seealso: DMPlexCreate()
471 @*/
472 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
473 {
474   const PetscInt maxFaceCases = 30;
475   PetscInt       numFaceCases = 0;
476   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
477   PetscInt      *off, *adj;
478   PetscInt      *neighborCells = NULL;
479   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
480   PetscErrorCode ierr;
481 
482   PetscFunctionBegin;
483   /* For parallel partitioning, I think you have to communicate supports */
484   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
485   cellDim = dim - cellHeight;
486   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
487   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
488   if (cEnd - cStart == 0) {
489     if (numVertices) *numVertices = 0;
490     if (offsets)   *offsets   = NULL;
491     if (adjacency) *adjacency = NULL;
492     PetscFunctionReturn(0);
493   }
494   numCells  = cEnd - cStart;
495   faceDepth = depth - cellHeight;
496   if (dim == depth) {
497     PetscInt f, fStart, fEnd;
498 
499     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
500     /* Count neighboring cells */
501     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
502     for (f = fStart; f < fEnd; ++f) {
503       const PetscInt *support;
504       PetscInt        supportSize;
505       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
506       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
507       if (supportSize == 2) {
508         PetscInt numChildren;
509 
510         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
511         if (!numChildren) {
512           ++off[support[0]-cStart+1];
513           ++off[support[1]-cStart+1];
514         }
515       }
516     }
517     /* Prefix sum */
518     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
519     if (adjacency) {
520       PetscInt *tmp;
521 
522       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
523       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
524       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
525       /* Get neighboring cells */
526       for (f = fStart; f < fEnd; ++f) {
527         const PetscInt *support;
528         PetscInt        supportSize;
529         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
530         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
531         if (supportSize == 2) {
532           PetscInt numChildren;
533 
534           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
535           if (!numChildren) {
536             adj[tmp[support[0]-cStart]++] = support[1];
537             adj[tmp[support[1]-cStart]++] = support[0];
538           }
539         }
540       }
541 #if defined(PETSC_USE_DEBUG)
542       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
543 #endif
544       ierr = PetscFree(tmp);CHKERRQ(ierr);
545     }
546     if (numVertices) *numVertices = numCells;
547     if (offsets)   *offsets   = off;
548     if (adjacency) *adjacency = adj;
549     PetscFunctionReturn(0);
550   }
551   /* Setup face recognition */
552   if (faceDepth == 1) {
553     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
554 
555     for (c = cStart; c < cEnd; ++c) {
556       PetscInt corners;
557 
558       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
559       if (!cornersSeen[corners]) {
560         PetscInt nFV;
561 
562         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
563         cornersSeen[corners] = 1;
564 
565         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
566 
567         numFaceVertices[numFaceCases++] = nFV;
568       }
569     }
570   }
571   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
572   /* Count neighboring cells */
573   for (cell = cStart; cell < cEnd; ++cell) {
574     PetscInt numNeighbors = PETSC_DETERMINE, n;
575 
576     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
577     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
578     for (n = 0; n < numNeighbors; ++n) {
579       PetscInt        cellPair[2];
580       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
581       PetscInt        meetSize = 0;
582       const PetscInt *meet    = NULL;
583 
584       cellPair[0] = cell; cellPair[1] = neighborCells[n];
585       if (cellPair[0] == cellPair[1]) continue;
586       if (!found) {
587         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
588         if (meetSize) {
589           PetscInt f;
590 
591           for (f = 0; f < numFaceCases; ++f) {
592             if (numFaceVertices[f] == meetSize) {
593               found = PETSC_TRUE;
594               break;
595             }
596           }
597         }
598         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
599       }
600       if (found) ++off[cell-cStart+1];
601     }
602   }
603   /* Prefix sum */
604   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
605 
606   if (adjacency) {
607     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
608     /* Get neighboring cells */
609     for (cell = cStart; cell < cEnd; ++cell) {
610       PetscInt numNeighbors = PETSC_DETERMINE, n;
611       PetscInt cellOffset   = 0;
612 
613       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
614       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
615       for (n = 0; n < numNeighbors; ++n) {
616         PetscInt        cellPair[2];
617         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
618         PetscInt        meetSize = 0;
619         const PetscInt *meet    = NULL;
620 
621         cellPair[0] = cell; cellPair[1] = neighborCells[n];
622         if (cellPair[0] == cellPair[1]) continue;
623         if (!found) {
624           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
625           if (meetSize) {
626             PetscInt f;
627 
628             for (f = 0; f < numFaceCases; ++f) {
629               if (numFaceVertices[f] == meetSize) {
630                 found = PETSC_TRUE;
631                 break;
632               }
633             }
634           }
635           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
636         }
637         if (found) {
638           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
639           ++cellOffset;
640         }
641       }
642     }
643   }
644   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
645   if (numVertices) *numVertices = numCells;
646   if (offsets)   *offsets   = off;
647   if (adjacency) *adjacency = adj;
648   PetscFunctionReturn(0);
649 }
650 
651 /*@C
652   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
653 
654   Not Collective
655 
656   Input Parameters:
657 + name        - The name of a new user-defined creation routine
658 - create_func - The creation routine itself
659 
660   Notes:
661   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
662 
663   Sample usage:
664 .vb
665     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
666 .ve
667 
668   Then, your PetscPartitioner type can be chosen with the procedural interface via
669 .vb
670     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
671     PetscPartitionerSetType(PetscPartitioner, "my_part");
672 .ve
673    or at runtime via the option
674 .vb
675     -petscpartitioner_type my_part
676 .ve
677 
678   Level: advanced
679 
680 .keywords: PetscPartitioner, register
681 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
682 
683 @*/
684 PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
685 {
686   PetscErrorCode ierr;
687 
688   PetscFunctionBegin;
689   ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
690   PetscFunctionReturn(0);
691 }
692 
693 /*@C
694   PetscPartitionerSetType - Builds a particular PetscPartitioner
695 
696   Collective on PetscPartitioner
697 
698   Input Parameters:
699 + part - The PetscPartitioner object
700 - name - The kind of partitioner
701 
702   Options Database Key:
703 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
704 
705   Level: intermediate
706 
707 .keywords: PetscPartitioner, set, type
708 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
709 @*/
710 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
711 {
712   PetscErrorCode (*r)(PetscPartitioner);
713   PetscBool      match;
714   PetscErrorCode ierr;
715 
716   PetscFunctionBegin;
717   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
718   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
719   if (match) PetscFunctionReturn(0);
720 
721   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
722   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
723   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
724 
725   if (part->ops->destroy) {
726     ierr = (*part->ops->destroy)(part);CHKERRQ(ierr);
727   }
728   part->noGraph = PETSC_FALSE;
729   ierr = PetscMemzero(part->ops, sizeof(struct _PetscPartitionerOps));CHKERRQ(ierr);
730   ierr = (*r)(part);CHKERRQ(ierr);
731   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
732   PetscFunctionReturn(0);
733 }
734 
735 /*@C
736   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
737 
738   Not Collective
739 
740   Input Parameter:
741 . part - The PetscPartitioner
742 
743   Output Parameter:
744 . name - The PetscPartitioner type name
745 
746   Level: intermediate
747 
748 .keywords: PetscPartitioner, get, type, name
749 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
750 @*/
751 PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
752 {
753   PetscErrorCode ierr;
754 
755   PetscFunctionBegin;
756   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
757   PetscValidPointer(name, 2);
758   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
759   *name = ((PetscObject) part)->type_name;
760   PetscFunctionReturn(0);
761 }
762 
763 /*@C
764   PetscPartitionerView - Views a PetscPartitioner
765 
766   Collective on PetscPartitioner
767 
768   Input Parameter:
769 + part - the PetscPartitioner object to view
770 - v    - the viewer
771 
772   Level: developer
773 
774 .seealso: PetscPartitionerDestroy()
775 @*/
776 PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
777 {
778   PetscMPIInt    size;
779   PetscBool      isascii;
780   PetscErrorCode ierr;
781 
782   PetscFunctionBegin;
783   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
784   if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
785   ierr = PetscObjectTypeCompare((PetscObject) v, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
786   if (isascii) {
787     ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
788     ierr = PetscViewerASCIIPrintf(v, "Graph Partitioner: %d MPI Process%s\n", size, size > 1 ? "es" : "");CHKERRQ(ierr);
789     ierr = PetscViewerASCIIPrintf(v, "  type: %s\n", part->hdr.type_name);CHKERRQ(ierr);
790     ierr = PetscViewerASCIIPushTab(v);CHKERRQ(ierr);
791     ierr = PetscViewerASCIIPrintf(v, "edge cut: %D\n", part->edgeCut);CHKERRQ(ierr);
792     ierr = PetscViewerASCIIPrintf(v, "balance:  %.2g\n", part->balance);CHKERRQ(ierr);
793     ierr = PetscViewerASCIIPopTab(v);CHKERRQ(ierr);
794   }
795   if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
796   PetscFunctionReturn(0);
797 }
798 
799 static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType)
800 {
801   PetscFunctionBegin;
802   if (!currentType) {
803 #if defined(PETSC_HAVE_CHACO)
804     *defaultType = PETSCPARTITIONERCHACO;
805 #elif defined(PETSC_HAVE_PARMETIS)
806     *defaultType = PETSCPARTITIONERPARMETIS;
807 #elif defined(PETSC_HAVE_PTSCOTCH)
808     *defaultType = PETSCPARTITIONERPTSCOTCH;
809 #else
810     *defaultType = PETSCPARTITIONERSIMPLE;
811 #endif
812   } else {
813     *defaultType = currentType;
814   }
815   PetscFunctionReturn(0);
816 }
817 
/*@
  PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to set options for

  Level: developer

.seealso: PetscPartitionerView()
@*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  const char    *defaultType;
  char           name[256];
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  /* If the object is already typed, that type becomes the option default */
  ierr = PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);CHKERRQ(ierr);
  /* PetscObjectOptionsBegin declares PetscOptionsObject used below; it is
     paired with PetscOptionsEnd() at the bottom of this function */
  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, sizeof(name), &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
  } else if (!((PetscObject) part)->type_name) {
    /* No type on the command line and none set yet: use the default */
    ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
  }
  /* Let the implementation process its own options */
  if (part->ops->setfromoptions) {
    ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);
  }
  /* Drop any graph viewer from a previous call before (possibly) creating a new one */
  ierr = PetscViewerDestroy(&part->viewerGraph);CHKERRQ(ierr);
  ierr = PetscOptionsGetViewer(((PetscObject) part)->comm, ((PetscObject) part)->options, ((PetscObject) part)->prefix, "-petscpartitioner_view_graph", &part->viewerGraph, &part->formatGraph, &part->viewGraph);CHKERRQ(ierr);
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
858 
859 /*@C
860   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
861 
862   Collective on PetscPartitioner
863 
864   Input Parameter:
865 . part - the PetscPartitioner object to setup
866 
867   Level: developer
868 
869 .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
870 @*/
871 PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
872 {
873   PetscErrorCode ierr;
874 
875   PetscFunctionBegin;
876   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
877   if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
878   PetscFunctionReturn(0);
879 }
880 
881 /*@
882   PetscPartitionerDestroy - Destroys a PetscPartitioner object
883 
884   Collective on PetscPartitioner
885 
886   Input Parameter:
887 . part - the PetscPartitioner object to destroy
888 
889   Level: developer
890 
891 .seealso: PetscPartitionerView()
892 @*/
893 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
894 {
895   PetscErrorCode ierr;
896 
897   PetscFunctionBegin;
898   if (!*part) PetscFunctionReturn(0);
899   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
900 
901   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
902   ((PetscObject) (*part))->refct = 0;
903 
904   ierr = PetscViewerDestroy(&(*part)->viewerGraph);CHKERRQ(ierr);
905   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
906   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
907   PetscFunctionReturn(0);
908 }
909 
910 /*@
911   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
912 
913   Collective on MPI_Comm
914 
915   Input Parameter:
916 . comm - The communicator for the PetscPartitioner object
917 
918   Output Parameter:
919 . part - The PetscPartitioner object
920 
921   Level: beginner
922 
923 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
924 @*/
925 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
926 {
927   PetscPartitioner p;
928   const char       *partitionerType = NULL;
929   PetscErrorCode   ierr;
930 
931   PetscFunctionBegin;
932   PetscValidPointer(part, 2);
933   *part = NULL;
934   ierr = DMInitializePackage();CHKERRQ(ierr);
935 
936   ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
937   ierr = PetscPartitionerGetDefaultType(NULL,&partitionerType);CHKERRQ(ierr);
938   ierr = PetscPartitionerSetType(p,partitionerType);CHKERRQ(ierr);
939 
940   p->edgeCut = 0;
941   p->balance = 0.0;
942 
943   *part = p;
944   PetscFunctionReturn(0);
945 }
946 
/*@
  PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh

  Collective on DM

  Input Parameters:
+ part    - The PetscPartitioner
- dm      - The mesh DM

  Output Parameters:
+ partSection     - The PetscSection giving the division of points by partition
- partition       - The list of points by partition

  Options Database:
. -petscpartitioner_view_graph - View the graph we are partitioning

  Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()

  Level: developer

.seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
{
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
  PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
  PetscValidPointer(partition, 5);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial shortcut: every point goes to partition 0 with the identity map */
    PetscInt *points;
    PetscInt  cStart, cEnd, c;

    ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
    ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
    for (c = cStart; c < cEnd; ++c) points[c] = c;
    ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  if (part->height == 0) {
    PetscInt numVertices = 0;
    PetscInt *start     = NULL;
    PetscInt *adjacency = NULL;
    IS       globalNumbering;

    if (!part->noGraph || part->viewGraph) {
      /* Build the dual (cell adjacency) graph for the partitioner */
      ierr = DMPlexCreatePartitionerGraph(dm, part->height, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
    } else {
      /* Graph-free partitioner: only count the locally owned points
         (those with a nonnegative global number) */
      const PetscInt *idxs;
      PetscInt       p, pStart, pEnd;

      ierr = DMPlexGetHeightStratum(dm, part->height, &pStart, &pEnd);CHKERRQ(ierr);
      ierr = DMPlexCreateNumbering_Internal(dm, pStart, pEnd, 0, NULL, dm->sf, &globalNumbering);CHKERRQ(ierr);
      ierr = ISGetIndices(globalNumbering, &idxs);CHKERRQ(ierr);
      for (p = 0; p < pEnd - pStart; p++) numVertices += idxs[p] < 0 ? 0 : 1;
      ierr = ISRestoreIndices(globalNumbering, &idxs);CHKERRQ(ierr);
    }
    if (part->viewGraph) {
      /* Dump the CSR graph (one synchronized line per local vertex) */
      PetscViewer viewer = part->viewerGraph;
      PetscBool   isascii;
      PetscInt    v, i;
      PetscMPIInt rank;

      ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) viewer), &rank);CHKERRQ(ierr);
      ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
      if (isascii) {
        ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]Nv: %D\n", rank, numVertices);CHKERRQ(ierr);
        for (v = 0; v < numVertices; ++v) {
          const PetscInt s = start[v];
          const PetscInt e = start[v+1];

          ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]  ", rank);CHKERRQ(ierr);
          for (i = s; i < e; ++i) {ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%D ", adjacency[i]);CHKERRQ(ierr);}
          ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%D-%D)\n", s, e);CHKERRQ(ierr);
        }
        ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
      }
    }
    if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no partitioning method");
    ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
    ierr = PetscFree(start);CHKERRQ(ierr);
    ierr = PetscFree(adjacency);CHKERRQ(ierr);
    if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
      const PetscInt *globalNum;
      const PetscInt *partIdx;
      PetscInt       *map, cStart, cEnd;
      PetscInt       *adjusted, i, localSize, offset;
      IS             newPartition;

      ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
      ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
      ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
      ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
      ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
      ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
      /* map[k] is the local point for the k-th locally owned vertex */
      for (i = cStart, offset = 0; i < cEnd; i++) {
        if (globalNum[i - cStart] >= 0) map[offset++] = i;
      }
      for (i = 0; i < localSize; i++) {
        adjusted[i] = map[partIdx[i]];
      }
      ierr = PetscFree(map);CHKERRQ(ierr);
      ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
      ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
      ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
      ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
      ierr = ISDestroy(partition);CHKERRQ(ierr);
      *partition = newPartition;
    }
  } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
  ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1069 
1070 static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
1071 {
1072   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1073   PetscErrorCode          ierr;
1074 
1075   PetscFunctionBegin;
1076   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
1077   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
1078   ierr = PetscFree(p);CHKERRQ(ierr);
1079   PetscFunctionReturn(0);
1080 }
1081 
1082 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
1083 {
1084   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1085   PetscErrorCode          ierr;
1086 
1087   PetscFunctionBegin;
1088   if (p->random) {
1089     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1090     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
1091     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1092   }
1093   PetscFunctionReturn(0);
1094 }
1095 
1096 static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
1097 {
1098   PetscBool      iascii;
1099   PetscErrorCode ierr;
1100 
1101   PetscFunctionBegin;
1102   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1103   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1104   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1105   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
1106   PetscFunctionReturn(0);
1107 }
1108 
1109 static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1110 {
1111   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1112   PetscErrorCode          ierr;
1113 
1114   PetscFunctionBegin;
1115   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner Shell Options");CHKERRQ(ierr);
1116   ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
1117   ierr = PetscOptionsTail();CHKERRQ(ierr);
1118   PetscFunctionReturn(0);
1119 }
1120 
1121 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1122 {
1123   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1124   PetscInt                np;
1125   PetscErrorCode          ierr;
1126 
1127   PetscFunctionBegin;
1128   if (p->random) {
1129     PetscRandom r;
1130     PetscInt   *sizes, *points, v, p;
1131     PetscMPIInt rank;
1132 
1133     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1134     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
1135     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr);
1136     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
1137     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
1138     for (v = 0; v < numVertices; ++v) {points[v] = v;}
1139     for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);}
1140     for (v = numVertices-1; v > 0; --v) {
1141       PetscReal val;
1142       PetscInt  w, tmp;
1143 
1144       ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));CHKERRQ(ierr);
1145       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
1146       w    = PetscFloorReal(val);
1147       tmp       = points[v];
1148       points[v] = points[w];
1149       points[w] = tmp;
1150     }
1151     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
1152     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
1153     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
1154   }
1155   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
1156   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
1157   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
1158   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
1159   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
1160   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
1161   *partition = p->partition;
1162   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
1163   PetscFunctionReturn(0);
1164 }
1165 
1166 static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
1167 {
1168   PetscFunctionBegin;
1169   part->noGraph             = PETSC_TRUE; /* PetscPartitionerShell cannot overload the partition call, so it is safe for now */
1170   part->ops->view           = PetscPartitionerView_Shell;
1171   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
1172   part->ops->destroy        = PetscPartitionerDestroy_Shell;
1173   part->ops->partition      = PetscPartitionerPartition_Shell;
1174   PetscFunctionReturn(0);
1175 }
1176 
1177 /*MC
1178   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
1179 
1180   Level: intermediate
1181 
1182 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1183 M*/
1184 
1185 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
1186 {
1187   PetscPartitioner_Shell *p;
1188   PetscErrorCode          ierr;
1189 
1190   PetscFunctionBegin;
1191   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1192   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1193   part->data = p;
1194 
1195   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
1196   p->random = PETSC_FALSE;
1197   PetscFunctionReturn(0);
1198 }
1199 
1200 /*@C
1201   PetscPartitionerShellSetPartition - Set an artifical partition for a mesh
1202 
1203   Collective on Part
1204 
1205   Input Parameters:
1206 + part   - The PetscPartitioner
1207 . size   - The number of partitions
1208 . sizes  - array of size size (or NULL) providing the number of points in each partition
1209 - points - array of size sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)
1210 
1211   Level: developer
1212 
1213   Notes:
1214 
1215     It is safe to free the sizes and points arrays after use in this routine.
1216 
1217 .seealso DMPlexDistribute(), PetscPartitionerCreate()
1218 @*/
1219 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
1220 {
1221   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1222   PetscInt                proc, numPoints;
1223   PetscErrorCode          ierr;
1224 
1225   PetscFunctionBegin;
1226   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1227   if (sizes)  {PetscValidPointer(sizes, 3);}
1228   if (points) {PetscValidPointer(points, 4);}
1229   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
1230   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
1231   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
1232   ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
1233   if (sizes) {
1234     for (proc = 0; proc < size; ++proc) {
1235       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
1236     }
1237   }
1238   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
1239   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
1240   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
1241   PetscFunctionReturn(0);
1242 }
1243 
1244 /*@
1245   PetscPartitionerShellSetRandom - Set the flag to use a random partition
1246 
1247   Collective on Part
1248 
1249   Input Parameters:
1250 + part   - The PetscPartitioner
1251 - random - The flag to use a random partition
1252 
1253   Level: intermediate
1254 
1255 .seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
1256 @*/
1257 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
1258 {
1259   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1260 
1261   PetscFunctionBegin;
1262   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1263   p->random = random;
1264   PetscFunctionReturn(0);
1265 }
1266 
1267 /*@
1268   PetscPartitionerShellGetRandom - get the flag to use a random partition
1269 
1270   Collective on Part
1271 
1272   Input Parameter:
1273 . part   - The PetscPartitioner
1274 
1275   Output Parameter
1276 . random - The flag to use a random partition
1277 
1278   Level: intermediate
1279 
1280 .seealso PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
1281 @*/
1282 PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
1283 {
1284   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1285 
1286   PetscFunctionBegin;
1287   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1288   PetscValidPointer(random, 2);
1289   *random = p->random;
1290   PetscFunctionReturn(0);
1291 }
1292 
1293 static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
1294 {
1295   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
1296   PetscErrorCode          ierr;
1297 
1298   PetscFunctionBegin;
1299   ierr = PetscFree(p);CHKERRQ(ierr);
1300   PetscFunctionReturn(0);
1301 }
1302 
/* The simple partitioner has no type-specific state to report */
static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1308 
1309 static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
1310 {
1311   PetscBool      iascii;
1312   PetscErrorCode ierr;
1313 
1314   PetscFunctionBegin;
1315   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1316   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1317   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1318   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
1319   PetscFunctionReturn(0);
1320 }
1321 
/* Divide the (implicitly globally concatenated) vertices into nparts
   contiguous, nearly equal chunks.  The local partition IS is always the
   identity; only the section dofs record how many local vertices fall in
   each part. */
static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)part);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial: part np gets floor(n/nparts) vertices plus one extra for the
       first (n % nparts) parts */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  } else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Gather each rank's vertex count, then prefix-sum so offsets[r] is the
       global index of rank r's first vertex (offsets[1] is already correct
       since offsets[0] = 0) */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      /* The first (nvGlobal % nparts) parts hold pBig vertices, the rest
         pSmall; find which parts overlap [myFirst, myLast] */
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      PetscInt rem = nvGlobal % nparts;
      PetscInt pSmall = nvGlobal/nparts;
      PetscInt pBig = nvGlobal/nparts + 1;


      if (rem) {
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        } else {
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        } else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      } else {
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      /* Record how many of this rank's vertices land in each overlapping part */
      for (np = firstPart; np <= lastPart; np++) {
        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1391 
1392 static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1393 {
1394   PetscFunctionBegin;
1395   part->noGraph        = PETSC_TRUE;
1396   part->ops->view      = PetscPartitionerView_Simple;
1397   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1398   part->ops->partition = PetscPartitionerPartition_Simple;
1399   PetscFunctionReturn(0);
1400 }
1401 
1402 /*MC
1403   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1404 
1405   Level: intermediate
1406 
1407 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1408 M*/
1409 
1410 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1411 {
1412   PetscPartitioner_Simple *p;
1413   PetscErrorCode           ierr;
1414 
1415   PetscFunctionBegin;
1416   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1417   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1418   part->data = p;
1419 
1420   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1421   PetscFunctionReturn(0);
1422 }
1423 
1424 static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1425 {
1426   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1427   PetscErrorCode          ierr;
1428 
1429   PetscFunctionBegin;
1430   ierr = PetscFree(p);CHKERRQ(ierr);
1431   PetscFunctionReturn(0);
1432 }
1433 
/* The gather partitioner has no type-specific state to report */
static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1439 
1440 static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1441 {
1442   PetscBool      iascii;
1443   PetscErrorCode ierr;
1444 
1445   PetscFunctionBegin;
1446   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1447   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1448   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1449   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1450   PetscFunctionReturn(0);
1451 }
1452 
1453 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1454 {
1455   PetscInt       np;
1456   PetscErrorCode ierr;
1457 
1458   PetscFunctionBegin;
1459   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1460   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1461   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1462   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1463   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1464   PetscFunctionReturn(0);
1465 }
1466 
1467 static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1468 {
1469   PetscFunctionBegin;
1470   part->noGraph        = PETSC_TRUE;
1471   part->ops->view      = PetscPartitionerView_Gather;
1472   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1473   part->ops->partition = PetscPartitionerPartition_Gather;
1474   PetscFunctionReturn(0);
1475 }
1476 
1477 /*MC
  PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object that assigns every point to the first partition
1479 
1480   Level: intermediate
1481 
1482 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1483 M*/
1484 
1485 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1486 {
1487   PetscPartitioner_Gather *p;
1488   PetscErrorCode           ierr;
1489 
1490   PetscFunctionBegin;
1491   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1492   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1493   part->data = p;
1494 
1495   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1496   PetscFunctionReturn(0);
1497 }
1498 
1499 
1500 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1501 {
1502   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1503   PetscErrorCode          ierr;
1504 
1505   PetscFunctionBegin;
1506   ierr = PetscFree(p);CHKERRQ(ierr);
1507   PetscFunctionReturn(0);
1508 }
1509 
/* ASCII view of a Chaco partitioner: no tunable state is exposed, so nothing is printed. */
static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1515 
1516 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1517 {
1518   PetscBool      iascii;
1519   PetscErrorCode ierr;
1520 
1521   PetscFunctionBegin;
1522   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1523   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1524   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1525   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1526   PetscFunctionReturn(0);
1527 }
1528 
1529 #if defined(PETSC_HAVE_CHACO)
1530 #if defined(PETSC_HAVE_UNISTD_H)
1531 #include <unistd.h>
1532 #endif
1533 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1534 #include <chaco.h>
1535 #else
1536 /* Older versions of Chaco do not have an include file */
1537 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1538                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1539                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1540                        int mesh_dims[3], double *goal, int global_method, int local_method,
1541                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1542 #endif
1543 extern int FREE_GRAPH;
1544 #endif
1545 
/*
  Partition a serial graph with the Chaco library.

  The CSR input (start/adjacency) is 0-based; Chaco expects 1-based vertex
  numbers, so the adjacency array is shifted up in place before the call and
  shifted back afterwards.  Chaco prints its diagnostics to stdout, which is
  temporarily redirected into a pipe so the text can be included in the error
  message if the library fails.

  NOTE(review): the (int*) casts on start/adjacency assume PetscInt and int
  have the same size -- confirm this path is disabled for 64-bit indices.
*/
static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int           *assignment;              /* Output partition */
#else
  short int     *assignment;              /* Output partition */
#endif
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
#if defined (PETSC_USE_DEBUG)
  /* Chaco is a serial partitioner: in debug builds, error out if more than one
     rank actually owns vertices */
  {
    int ival,isum;
    PetscBool distributed;

    ival = (numVertices > 0);
    ierr = MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);CHKERRQ(ierr);
    distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
    if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
  }
#endif
  /* Ranks without vertices return an empty section and IS */
  if (!numVertices) {
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Shift the adjacency to the 1-based numbering Chaco expects */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  /* Partition onto a 1-D mesh of nparts processors */
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    /* Drain the captured Chaco output and restore the original stdout */
    /* NOTE(review): assumes Chaco wrote at least one byte; read() on an empty
       pipe would block -- confirm */
    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#else
  if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* List the vertices grouped by the partition they were assigned to */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Restore the caller's 0-based adjacency numbering */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1666 
1667 static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1668 {
1669   PetscFunctionBegin;
1670   part->noGraph        = PETSC_FALSE;
1671   part->ops->view      = PetscPartitionerView_Chaco;
1672   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1673   part->ops->partition = PetscPartitionerPartition_Chaco;
1674   PetscFunctionReturn(0);
1675 }
1676 
1677 /*MC
1678   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1679 
1680   Level: intermediate
1681 
1682 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1683 M*/
1684 
1685 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1686 {
1687   PetscPartitioner_Chaco *p;
1688   PetscErrorCode          ierr;
1689 
1690   PetscFunctionBegin;
1691   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1692   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1693   part->data = p;
1694 
1695   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1696   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1697   PetscFunctionReturn(0);
1698 }
1699 
1700 static const char *ptypes[] = {"kway", "rb"};
1701 
1702 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1703 {
1704   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1705   PetscErrorCode             ierr;
1706 
1707   PetscFunctionBegin;
1708   ierr = PetscFree(p);CHKERRQ(ierr);
1709   PetscFunctionReturn(0);
1710 }
1711 
1712 static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1713 {
1714   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1715   PetscErrorCode             ierr;
1716 
1717   PetscFunctionBegin;
1718   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1719   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis type: %s\n", ptypes[p->ptype]);CHKERRQ(ierr);
1720   ierr = PetscViewerASCIIPrintf(viewer, "load imbalance ratio %g\n", (double) p->imbalanceRatio);CHKERRQ(ierr);
1721   ierr = PetscViewerASCIIPrintf(viewer, "debug flag %D\n", p->debugFlag);CHKERRQ(ierr);
1722   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1723   PetscFunctionReturn(0);
1724 }
1725 
1726 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1727 {
1728   PetscBool      iascii;
1729   PetscErrorCode ierr;
1730 
1731   PetscFunctionBegin;
1732   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1733   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1734   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1735   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1736   PetscFunctionReturn(0);
1737 }
1738 
1739 static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1740 {
1741   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1742   PetscInt                  p_randomSeed = -1; /* TODO: Add this to PetscPartitioner_ParMetis */
1743   PetscErrorCode            ierr;
1744 
1745   PetscFunctionBegin;
1746   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");CHKERRQ(ierr);
1747   ierr = PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);CHKERRQ(ierr);
1748   ierr = PetscOptionsReal("-petscpartitioner_parmetis_imbalance_ratio", "Load imbalance ratio limit", "", p->imbalanceRatio, &p->imbalanceRatio, NULL);CHKERRQ(ierr);
1749   ierr = PetscOptionsInt("-petscpartitioner_parmetis_debug", "Debugging flag", "", p->debugFlag, &p->debugFlag, NULL);CHKERRQ(ierr);
1750   ierr = PetscOptionsInt("-petscpartitioner_parmetis_seed", "Random seed", "", p_randomSeed, &p_randomSeed, NULL);CHKERRQ(ierr);
1751   ierr = PetscOptionsTail();CHKERRQ(ierr);
1752   PetscFunctionReturn(0);
1753 }
1754 
1755 #if defined(PETSC_HAVE_PARMETIS)
1756 #include <parmetis.h>
1757 #endif
1758 
1759 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1760 {
1761 #if defined(PETSC_HAVE_PARMETIS)
1762   PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *) part->data;
1763   MPI_Comm       comm;
1764   PetscSection   section;
1765   PetscInt       nvtxs       = numVertices; /* The number of vertices in full graph */
1766   PetscInt      *vtxdist;                   /* Distribution of vertices across processes */
1767   PetscInt      *xadj        = start;       /* Start of edge list for each vertex */
1768   PetscInt      *adjncy      = adjacency;   /* Edge lists for all vertices */
1769   PetscInt      *vwgt        = NULL;        /* Vertex weights */
1770   PetscInt      *adjwgt      = NULL;        /* Edge weights */
1771   PetscInt       wgtflag     = 0;           /* Indicates which weights are present */
1772   PetscInt       numflag     = 0;           /* Indicates initial offset (0 or 1) */
1773   PetscInt       ncon        = 1;           /* The number of weights per vertex */
1774   PetscInt       metis_ptype = pm->ptype;   /* kway or recursive bisection */
1775   real_t        *tpwgts;                    /* The fraction of vertex weights assigned to each partition */
1776   real_t        *ubvec;                     /* The balance intolerance for vertex weights */
1777   PetscInt       options[64];               /* Options */
1778   /* Outputs */
1779   PetscInt       v, i, *assignment, *points;
1780   PetscMPIInt    size, rank, p;
1781   PetscInt       pm_randomSeed = -1;
1782   PetscErrorCode ierr;
1783 
1784   PetscFunctionBegin;
1785   ierr = PetscOptionsGetInt(((PetscObject)part)->options,((PetscObject)part)->prefix,"-petscpartitioner_parmetis_seed", &pm_randomSeed, NULL);CHKERRQ(ierr);
1786   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1787   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1788   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1789   /* Calculate vertex distribution */
1790   ierr = PetscMalloc5(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
1791   vtxdist[0] = 0;
1792   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1793   for (p = 2; p <= size; ++p) {
1794     vtxdist[p] += vtxdist[p-1];
1795   }
1796   /* Calculate partition weights */
1797   for (p = 0; p < nparts; ++p) {
1798     tpwgts[p] = 1.0/nparts;
1799   }
1800   ubvec[0] = pm->imbalanceRatio;
1801   /* Weight cells by dofs on cell by default */
1802   ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
1803   if (section) {
1804     PetscInt cStart, cEnd, dof;
1805 
1806     ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
1807     for (v = cStart; v < cEnd; ++v) {
1808       ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1809       /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1810       /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1811       if (v-cStart < numVertices) vwgt[v-cStart] = PetscMax(dof, 1);
1812     }
1813   } else {
1814     for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1815   }
1816   wgtflag |= 2; /* have weights on graph vertices */
1817 
1818   if (nparts == 1) {
1819     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1820   } else {
1821     for (p = 0; !vtxdist[p+1] && p < size; ++p);
1822     if (vtxdist[p+1] == vtxdist[size]) {
1823       if (rank == p) {
1824         ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
1825         options[METIS_OPTION_DBGLVL] = pm->debugFlag;
1826         options[METIS_OPTION_SEED]   = pm_randomSeed;
1827         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
1828         if (metis_ptype == 1) {
1829           PetscStackPush("METIS_PartGraphRecursive");
1830           ierr = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1831           PetscStackPop;
1832           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
1833         } else {
1834           /*
1835            It would be nice to activate the two options below, but they would need some actual testing.
1836            - Turning on these options may exercise path of the METIS code that have bugs and may break production runs.
1837            - If CONTIG is set to 1, METIS will exit with error if the graph is disconnected, despite the manual saying the option is ignored in such case.
1838           */
1839           /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
1840           /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
1841           PetscStackPush("METIS_PartGraphKway");
1842           ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1843           PetscStackPop;
1844           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1845         }
1846       }
1847     } else {
1848       options[0] = 1; /*use options */
1849       options[1] = pm->debugFlag;
1850       options[2] = (pm_randomSeed == -1) ? 15 : pm_randomSeed; /* default is GLOBAL_SEED=15 from `libparmetis/defs.h` */
1851       PetscStackPush("ParMETIS_V3_PartKway");
1852       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &comm);
1853       PetscStackPop;
1854       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
1855     }
1856   }
1857   /* Convert to PetscSection+IS */
1858   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1859   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1860   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1861   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1862   for (p = 0, i = 0; p < nparts; ++p) {
1863     for (v = 0; v < nvtxs; ++v) {
1864       if (assignment[v] == p) points[i++] = v;
1865     }
1866   }
1867   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1868   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1869   ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
1870   PetscFunctionReturn(0);
1871 #else
1872   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1873 #endif
1874 }
1875 
1876 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1877 {
1878   PetscFunctionBegin;
1879   part->noGraph             = PETSC_FALSE;
1880   part->ops->view           = PetscPartitionerView_ParMetis;
1881   part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
1882   part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
1883   part->ops->partition      = PetscPartitionerPartition_ParMetis;
1884   PetscFunctionReturn(0);
1885 }
1886 
1887 /*MC
1888   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1889 
1890   Level: intermediate
1891 
1892 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1893 M*/
1894 
1895 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1896 {
1897   PetscPartitioner_ParMetis *p;
1898   PetscErrorCode          ierr;
1899 
1900   PetscFunctionBegin;
1901   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1902   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1903   part->data = p;
1904 
1905   p->ptype          = 0;
1906   p->imbalanceRatio = 1.05;
1907   p->debugFlag      = 0;
1908 
1909   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1910   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1911   PetscFunctionReturn(0);
1912 }
1913 
1914 PetscBool PTScotchPartitionercite = PETSC_FALSE;
1915 const char PTScotchPartitionerCitation[] =
1916   "@article{PTSCOTCH,\n"
1917   "  author  = {C. Chevalier and F. Pellegrini},\n"
1918   "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
1919   "  journal = {Parallel Computing},\n"
1920   "  volume  = {34},\n"
1921   "  number  = {6},\n"
1922   "  pages   = {318--331},\n"
1923   "  year    = {2008},\n"
1924   "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
1925   "}\n";
1926 
/* Implementation context for the PT-Scotch partitioner */
typedef struct {
  PetscInt  strategy;   /* index into PTScotchStrategyList; mapped to a SCOTCH_STRAT* flag by PTScotch_Strategy() */
  PetscReal imbalance;  /* allowed load imbalance ratio passed to Scotch strategy construction */
} PetscPartitioner_PTScotch;
1931 
/* Human-readable names of the supported Scotch strategies, indexed by
   PetscPartitioner_PTScotch.strategy (see PTScotch_Strategy() for the
   corresponding SCOTCH_STRAT* flags) */
static const char *const
PTScotchStrategyList[] = {
  "DEFAULT",
  "QUALITY",
  "SPEED",
  "BALANCE",
  "SAFETY",
  "SCALABILITY",
  "RECURSIVE",
  "REMAP"
};
1943 
1944 #if defined(PETSC_HAVE_PTSCOTCH)
1945 
1946 EXTERN_C_BEGIN
1947 #include <ptscotch.h>
1948 EXTERN_C_END
1949 
1950 #define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0)
1951 
1952 static int PTScotch_Strategy(PetscInt strategy)
1953 {
1954   switch (strategy) {
1955   case  0: return SCOTCH_STRATDEFAULT;
1956   case  1: return SCOTCH_STRATQUALITY;
1957   case  2: return SCOTCH_STRATSPEED;
1958   case  3: return SCOTCH_STRATBALANCE;
1959   case  4: return SCOTCH_STRATSAFETY;
1960   case  5: return SCOTCH_STRATSCALABILITY;
1961   case  6: return SCOTCH_STRATRECURSIVE;
1962   case  7: return SCOTCH_STRATREMAP;
1963   default: return SCOTCH_STRATDEFAULT;
1964   }
1965 }
1966 

/*
  Partition a serial graph with (sequential) Scotch.

  xadj/adjncy are the 0-based CSR adjacency of the n-vertex graph; the
  resulting part numbers are written into part[].  Vertex weights can be
  disabled at runtime with -petscpartititoner_ptscotch_vertex_weight 0
  (note: the option name carries a historical typo, "partititoner").
*/
static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
                                             SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[])
{
  SCOTCH_Graph   grafdat;
  SCOTCH_Strat   stradat;
  SCOTCH_Num     vertnbr = n;
  SCOTCH_Num     edgenbr = xadj[n];
  SCOTCH_Num*    velotab = vtxwgt;
  SCOTCH_Num*    edlotab = adjwgt;
  SCOTCH_Num     flagval = strategy;
  double         kbalval = imbalance;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  {
    /* Allow the user to switch off vertex weighting entirely */
    PetscBool flg = PETSC_TRUE;
    ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
    if (!flg) velotab = NULL;
  }
  ierr = SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
#if defined(PETSC_USE_DEBUG)
  /* Graph consistency check only in debug builds */
  ierr = SCOTCH_graphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
#endif
  ierr = SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
  SCOTCH_stratExit(&stradat);
  SCOTCH_graphExit(&grafdat);
  PetscFunctionReturn(0);
}
1999 
2000 static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
2001                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[], MPI_Comm comm)
2002 {
2003   PetscMPIInt     procglbnbr;
2004   PetscMPIInt     proclocnum;
2005   SCOTCH_Arch     archdat;
2006   SCOTCH_Dgraph   grafdat;
2007   SCOTCH_Dmapping mappdat;
2008   SCOTCH_Strat    stradat;
2009   SCOTCH_Num      vertlocnbr;
2010   SCOTCH_Num      edgelocnbr;
2011   SCOTCH_Num*     veloloctab = vtxwgt;
2012   SCOTCH_Num*     edloloctab = adjwgt;
2013   SCOTCH_Num      flagval = strategy;
2014   double          kbalval = imbalance;
2015   PetscErrorCode  ierr;
2016 
2017   PetscFunctionBegin;
2018   {
2019     PetscBool flg = PETSC_TRUE;
2020     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
2021     if (!flg) veloloctab = NULL;
2022   }
2023   ierr = MPI_Comm_size(comm, &procglbnbr);CHKERRQ(ierr);
2024   ierr = MPI_Comm_rank(comm, &proclocnum);CHKERRQ(ierr);
2025   vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
2026   edgelocnbr = xadj[vertlocnbr];
2027 
2028   ierr = SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr);
2029   ierr = SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr);
2030 #if defined(PETSC_USE_DEBUG)
2031   ierr = SCOTCH_dgraphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
2032 #endif
2033   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
2034   ierr = SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRQ(ierr);
2035   ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
2036   ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
2037   ierr = SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr);
2038 
2039   ierr = SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr);
2040   SCOTCH_dgraphMapExit(&grafdat, &mappdat);
2041   SCOTCH_archExit(&archdat);
2042   SCOTCH_stratExit(&stradat);
2043   SCOTCH_dgraphExit(&grafdat);
2044   PetscFunctionReturn(0);
2045 }
2046 
2047 #endif /* PETSC_HAVE_PTSCOTCH */
2048 
2049 static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
2050 {
2051   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
2052   PetscErrorCode             ierr;
2053 
2054   PetscFunctionBegin;
2055   ierr = PetscFree(p);CHKERRQ(ierr);
2056   PetscFunctionReturn(0);
2057 }
2058 
2059 static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer)
2060 {
2061   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
2062   PetscErrorCode            ierr;
2063 
2064   PetscFunctionBegin;
2065   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
2066   ierr = PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);CHKERRQ(ierr);
2067   ierr = PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n",(double)p->imbalance);CHKERRQ(ierr);
2068   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
2069   PetscFunctionReturn(0);
2070 }
2071 
2072 static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
2073 {
2074   PetscBool      iascii;
2075   PetscErrorCode ierr;
2076 
2077   PetscFunctionBegin;
2078   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
2079   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
2080   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
2081   if (iascii) {ierr = PetscPartitionerView_PTScotch_Ascii(part, viewer);CHKERRQ(ierr);}
2082   PetscFunctionReturn(0);
2083 }
2084 
2085 static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
2086 {
2087   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
2088   const char *const         *slist = PTScotchStrategyList;
2089   PetscInt                  nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0]));
2090   PetscBool                 flag;
2091   PetscErrorCode            ierr;
2092 
2093   PetscFunctionBegin;
2094   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");CHKERRQ(ierr);
2095   ierr = PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);CHKERRQ(ierr);
2096   ierr = PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);CHKERRQ(ierr);
2097   ierr = PetscOptionsTail();CHKERRQ(ierr);
2098   PetscFunctionReturn(0);
2099 }
2100 
2101 static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
2102 {
2103 #if defined(PETSC_HAVE_PTSCOTCH)
2104   MPI_Comm       comm       = PetscObjectComm((PetscObject)part);
2105   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
2106   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
2107   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
2108   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
2109   PetscInt      *vwgt       = NULL;        /* Vertex weights */
2110   PetscInt      *adjwgt     = NULL;        /* Edge weights */
2111   PetscInt       v, i, *assignment, *points;
2112   PetscMPIInt    size, rank, p;
2113   PetscErrorCode ierr;
2114 
2115   PetscFunctionBegin;
2116   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2117   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2118   ierr = PetscMalloc2(size+1,&vtxdist,PetscMax(nvtxs,1),&assignment);CHKERRQ(ierr);
2119 
2120   /* Calculate vertex distribution */
2121   vtxdist[0] = 0;
2122   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
2123   for (p = 2; p <= size; ++p) {
2124     vtxdist[p] += vtxdist[p-1];
2125   }
2126 
2127   if (nparts == 1) {
2128     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
2129   } else {
2130     PetscSection section;
2131     /* Weight cells by dofs on cell by default */
2132     ierr = PetscMalloc1(PetscMax(nvtxs,1),&vwgt);CHKERRQ(ierr);
2133     ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
2134     if (section) {
2135       PetscInt vStart, eEnd, dof;
2136       ierr = DMPlexGetHeightStratum(dm, 0, &vStart, &eEnd);CHKERRQ(ierr);
2137       for (v = vStart; v < eEnd; ++v) {
2138         ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
2139         /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
2140         /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
2141         if (v-vStart < numVertices) vwgt[v-vStart] = PetscMax(dof, 1);
2142       }
2143     } else {
2144       for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
2145     }
2146     {
2147       PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
2148       int                       strat = PTScotch_Strategy(pts->strategy);
2149       double                    imbal = (double)pts->imbalance;
2150 
2151       for (p = 0; !vtxdist[p+1] && p < size; ++p);
2152       if (vtxdist[p+1] == vtxdist[size]) {
2153         if (rank == p) {
2154           ierr = PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, assignment);CHKERRQ(ierr);
2155         }
2156       } else {
2157         ierr = PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, assignment, comm);CHKERRQ(ierr);
2158       }
2159     }
2160     ierr = PetscFree(vwgt);CHKERRQ(ierr);
2161   }
2162 
2163   /* Convert to PetscSection+IS */
2164   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
2165   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
2166   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
2167   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
2168   for (p = 0, i = 0; p < nparts; ++p) {
2169     for (v = 0; v < nvtxs; ++v) {
2170       if (assignment[v] == p) points[i++] = v;
2171     }
2172   }
2173   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
2174   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
2175 
2176   ierr = PetscFree2(vtxdist,assignment);CHKERRQ(ierr);
2177   PetscFunctionReturn(0);
2178 #else
2179   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
2180 #endif
2181 }
2182 
2183 static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
2184 {
2185   PetscFunctionBegin;
2186   part->noGraph             = PETSC_FALSE;
2187   part->ops->view           = PetscPartitionerView_PTScotch;
2188   part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
2189   part->ops->partition      = PetscPartitionerPartition_PTScotch;
2190   part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
2191   PetscFunctionReturn(0);
2192 }
2193 
2194 /*MC
2195   PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library
2196 
2197   Level: intermediate
2198 
2199 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
2200 M*/
2201 
2202 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
2203 {
2204   PetscPartitioner_PTScotch *p;
2205   PetscErrorCode          ierr;
2206 
2207   PetscFunctionBegin;
2208   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
2209   ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
2210   part->data = p;
2211 
2212   p->strategy  = 0;
2213   p->imbalance = 0.01;
2214 
2215   ierr = PetscPartitionerInitialize_PTScotch(part);CHKERRQ(ierr);
2216   ierr = PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);CHKERRQ(ierr);
2217   PetscFunctionReturn(0);
2218 }
2219 
2220 
2221 /*@
2222   DMPlexGetPartitioner - Get the mesh partitioner
2223 
2224   Not collective
2225 
2226   Input Parameter:
2227 . dm - The DM
2228 
2229   Output Parameter:
2230 . part - The PetscPartitioner
2231 
2232   Level: developer
2233 
2234   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
2235 
2236 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
2237 @*/
2238 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
2239 {
2240   DM_Plex       *mesh = (DM_Plex *) dm->data;
2241 
2242   PetscFunctionBegin;
2243   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2244   PetscValidPointer(part, 2);
2245   *part = mesh->partitioner;
2246   PetscFunctionReturn(0);
2247 }
2248 
2249 /*@
2250   DMPlexSetPartitioner - Set the mesh partitioner
2251 
2252   logically collective on dm and part
2253 
2254   Input Parameters:
2255 + dm - The DM
2256 - part - The partitioner
2257 
2258   Level: developer
2259 
2260   Note: Any existing PetscPartitioner will be destroyed.
2261 
2262 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
2263 @*/
2264 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
2265 {
2266   DM_Plex       *mesh = (DM_Plex *) dm->data;
2267   PetscErrorCode ierr;
2268 
2269   PetscFunctionBegin;
2270   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2271   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
2272   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
2273   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
2274   mesh->partitioner = part;
2275   PetscFunctionReturn(0);
2276 }
2277 
2278 static PetscErrorCode DMPlexAddClosure_Tree(DM dm, PetscHSetI ht, PetscInt point, PetscBool up, PetscBool down)
2279 {
2280   PetscErrorCode ierr;
2281 
2282   PetscFunctionBegin;
2283   if (up) {
2284     PetscInt parent;
2285 
2286     ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
2287     if (parent != point) {
2288       PetscInt closureSize, *closure = NULL, i;
2289 
2290       ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
2291       for (i = 0; i < closureSize; i++) {
2292         PetscInt cpoint = closure[2*i];
2293 
2294         ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
2295         ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
2296       }
2297       ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
2298     }
2299   }
2300   if (down) {
2301     PetscInt numChildren;
2302     const PetscInt *children;
2303 
2304     ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
2305     if (numChildren) {
2306       PetscInt i;
2307 
2308       for (i = 0; i < numChildren; i++) {
2309         PetscInt cpoint = children[i];
2310 
2311         ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
2312         ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
2313       }
2314     }
2315   }
2316   PetscFunctionReturn(0);
2317 }
2318 
2319 PETSC_INTERN PetscErrorCode DMPlexPartitionLabelClosure_Private(DM,DMLabel,PetscInt,PetscInt,const PetscInt[],IS*);
2320 
2321 PetscErrorCode DMPlexPartitionLabelClosure_Private(DM dm, DMLabel label, PetscInt rank, PetscInt numPoints, const PetscInt points[], IS *closureIS)
2322 {
2323   DM_Plex        *mesh = (DM_Plex *)dm->data;
2324   PetscBool      hasTree = (mesh->parentSection || mesh->childSection) ? PETSC_TRUE : PETSC_FALSE;
2325   PetscInt       *closure = NULL, closureSize, nelems, *elems, off = 0, p, c;
2326   PetscHSetI     ht;
2327   PetscErrorCode ierr;
2328 
2329   PetscFunctionBegin;
2330   ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
2331   ierr = PetscHSetIResize(ht, numPoints*16);CHKERRQ(ierr);
2332   for (p = 0; p < numPoints; ++p) {
2333     ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
2334     for (c = 0; c < closureSize*2; c += 2) {
2335       ierr = PetscHSetIAdd(ht, closure[c]);CHKERRQ(ierr);
2336       if (hasTree) {ierr = DMPlexAddClosure_Tree(dm, ht, closure[c], PETSC_TRUE, PETSC_TRUE);CHKERRQ(ierr);}
2337     }
2338   }
2339   if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure);CHKERRQ(ierr);}
2340   ierr = PetscHSetIGetSize(ht, &nelems);CHKERRQ(ierr);
2341   ierr = PetscMalloc1(nelems, &elems);CHKERRQ(ierr);
2342   ierr = PetscHSetIGetElems(ht, &off, elems);CHKERRQ(ierr);
2343   ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
2344   ierr = PetscSortInt(nelems, elems);CHKERRQ(ierr);
2345   ierr = ISCreateGeneral(PETSC_COMM_SELF, nelems, elems, PETSC_OWN_POINTER, closureIS);CHKERRQ(ierr);
2346   PetscFunctionReturn(0);
2347 }
2348 
2349 /*@
2350   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
2351 
2352   Input Parameters:
2353 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2355 
2356   Level: developer
2357 
2358 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2359 @*/
2360 PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
2361 {
2362   IS              rankIS,   pointIS, closureIS;
2363   const PetscInt *ranks,   *points;
2364   PetscInt        numRanks, numPoints, r;
2365   PetscErrorCode  ierr;
2366 
2367   PetscFunctionBegin;
2368   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2369   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2370   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2371   for (r = 0; r < numRanks; ++r) {
2372     const PetscInt rank = ranks[r];
2373     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2374     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2375     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2376     ierr = DMPlexPartitionLabelClosure_Private(dm, label, rank, numPoints, points, &closureIS);CHKERRQ(ierr);
2377     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2378     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2379     ierr = DMLabelSetStratumIS(label, rank, closureIS);CHKERRQ(ierr);
2380     ierr = ISDestroy(&closureIS);CHKERRQ(ierr);
2381   }
2382   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2383   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2384   PetscFunctionReturn(0);
2385 }
2386 
2387 /*@
2388   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
2389 
2390   Input Parameters:
2391 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2393 
2394   Level: developer
2395 
2396 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2397 @*/
2398 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
2399 {
2400   IS              rankIS,   pointIS;
2401   const PetscInt *ranks,   *points;
2402   PetscInt        numRanks, numPoints, r, p, a, adjSize;
2403   PetscInt       *adj = NULL;
2404   PetscErrorCode  ierr;
2405 
2406   PetscFunctionBegin;
2407   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2408   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2409   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2410   for (r = 0; r < numRanks; ++r) {
2411     const PetscInt rank = ranks[r];
2412 
2413     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2414     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2415     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2416     for (p = 0; p < numPoints; ++p) {
2417       adjSize = PETSC_DETERMINE;
2418       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
2419       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
2420     }
2421     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2422     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2423   }
2424   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2425   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2426   ierr = PetscFree(adj);CHKERRQ(ierr);
2427   PetscFunctionReturn(0);
2428 }
2429 
2430 /*@
2431   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
2432 
2433   Input Parameters:
2434 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2436 
2437   Level: developer
2438 
2439   Note: This is required when generating multi-level overlaps to capture
2440   overlap points from non-neighbouring partitions.
2441 
2442 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2443 @*/
2444 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
2445 {
2446   MPI_Comm        comm;
2447   PetscMPIInt     rank;
2448   PetscSF         sfPoint;
2449   DMLabel         lblRoots, lblLeaves;
2450   IS              rankIS, pointIS;
2451   const PetscInt *ranks;
2452   PetscInt        numRanks, r;
2453   PetscErrorCode  ierr;
2454 
2455   PetscFunctionBegin;
2456   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2457   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2458   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
2459   /* Pull point contributions from remote leaves into local roots */
2460   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
2461   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
2462   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2463   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2464   for (r = 0; r < numRanks; ++r) {
2465     const PetscInt remoteRank = ranks[r];
2466     if (remoteRank == rank) continue;
2467     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
2468     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2469     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2470   }
2471   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2472   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2473   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
2474   /* Push point contributions from roots into remote leaves */
2475   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
2476   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
2477   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2478   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2479   for (r = 0; r < numRanks; ++r) {
2480     const PetscInt remoteRank = ranks[r];
2481     if (remoteRank == rank) continue;
2482     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
2483     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2484     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2485   }
2486   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2487   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2488   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
2489   PetscFunctionReturn(0);
2490 }
2491 
2492 /*@
2493   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
2494 
2495   Input Parameters:
+ dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots
2502 
2503   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
2504   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
2505 
2506   Level: developer
2507 
2508 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2509 @*/
2510 PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
2511 {
2512   MPI_Comm           comm;
2513   PetscMPIInt        rank, size, r;
2514   PetscInt           p, n, numNeighbors, numPoints, dof, off, rootSize, l, nleaves, leafSize;
2515   PetscSF            sfPoint;
2516   PetscSection       rootSection;
2517   PetscSFNode       *rootPoints, *leafPoints;
2518   const PetscSFNode *remote;
2519   const PetscInt    *local, *neighbors;
2520   IS                 valueIS;
2521   PetscBool          mpiOverflow = PETSC_FALSE;
2522   PetscErrorCode     ierr;
2523 
2524   PetscFunctionBegin;
2525   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2526   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2527   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2528   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
2529 
2530   /* Convert to (point, rank) and use actual owners */
2531   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
2532   ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
2533   ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
2534   ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
2535   ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
2536   for (n = 0; n < numNeighbors; ++n) {
2537     ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
2538     ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
2539   }
2540   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
2541   ierr = PetscSectionGetStorageSize(rootSection, &rootSize);CHKERRQ(ierr);
2542   ierr = PetscMalloc1(rootSize, &rootPoints);CHKERRQ(ierr);
2543   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
2544   for (n = 0; n < numNeighbors; ++n) {
2545     IS              pointIS;
2546     const PetscInt *points;
2547 
2548     ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
2549     ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
2550     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2551     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2552     for (p = 0; p < numPoints; ++p) {
2553       if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
2554       else       {l = -1;}
2555       if (l >= 0) {rootPoints[off+p] = remote[l];}
2556       else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
2557     }
2558     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2559     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2560   }
2561 
2562   /* Try to communicate overlap using All-to-All */
2563   if (!processSF) {
2564     PetscInt64  counter = 0;
2565     PetscBool   locOverflow = PETSC_FALSE;
2566     PetscMPIInt *scounts, *sdispls, *rcounts, *rdispls;
2567 
2568     ierr = PetscCalloc4(size, &scounts, size, &sdispls, size, &rcounts, size, &rdispls);CHKERRQ(ierr);
2569     for (n = 0; n < numNeighbors; ++n) {
2570       ierr = PetscSectionGetDof(rootSection, neighbors[n], &dof);CHKERRQ(ierr);
2571       ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
2572 #if defined(PETSC_USE_64BIT_INDICES)
2573       if (dof > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
2574       if (off > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
2575 #endif
2576       scounts[neighbors[n]] = (PetscMPIInt) dof;
2577       sdispls[neighbors[n]] = (PetscMPIInt) off;
2578     }
2579     ierr = MPI_Alltoall(scounts, 1, MPI_INT, rcounts, 1, MPI_INT, comm);CHKERRQ(ierr);
2580     for (r = 0; r < size; ++r) { rdispls[r] = (int)counter; counter += rcounts[r]; }
2581     if (counter > PETSC_MPI_INT_MAX) locOverflow = PETSC_TRUE;
2582     ierr = MPI_Allreduce(&locOverflow, &mpiOverflow, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
2583     if (!mpiOverflow) {
2584       leafSize = (PetscInt) counter;
2585       ierr = PetscMalloc1(leafSize, &leafPoints);CHKERRQ(ierr);
2586       ierr = MPI_Alltoallv(rootPoints, scounts, sdispls, MPIU_2INT, leafPoints, rcounts, rdispls, MPIU_2INT, comm);CHKERRQ(ierr);
2587     }
2588     ierr = PetscFree4(scounts, sdispls, rcounts, rdispls);CHKERRQ(ierr);
2589   }
2590 
2591   /* Communicate overlap using process star forest */
2592   if (processSF || mpiOverflow) {
2593     PetscSF      procSF;
2594     PetscSFNode  *remote;
2595     PetscSection leafSection;
2596 
2597     if (processSF) {
2598       ierr = PetscObjectReference((PetscObject)processSF);CHKERRQ(ierr);
2599       procSF = processSF;
2600     } else {
2601       ierr = PetscMalloc1(size, &remote);CHKERRQ(ierr);
2602       for (r = 0; r < size; ++r) { remote[r].rank  = r; remote[r].index = rank; }
2603       ierr = PetscSFCreate(comm, &procSF);CHKERRQ(ierr);
2604       ierr = PetscSFSetGraph(procSF, size, size, NULL, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
2605     }
2606 
2607     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dm), &leafSection);CHKERRQ(ierr);
2608     ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
2609     ierr = PetscSectionGetStorageSize(leafSection, &leafSize);CHKERRQ(ierr);
2610     ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
2611     ierr = PetscSFDestroy(&procSF);CHKERRQ(ierr);
2612   }
2613 
2614   for (p = 0; p < leafSize; p++) {
2615     ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
2616   }
2617 
2618   ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
2619   ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
2620   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
2621   ierr = PetscFree(rootPoints);CHKERRQ(ierr);
2622   ierr = PetscFree(leafPoints);CHKERRQ(ierr);
2623   PetscFunctionReturn(0);
2624 }
2625 
2626 /*@
2627   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
2628 
2629   Input Parameters:
+ dm    - The DM
- label - DMLabel assigning ranks to remote roots
2632 
2633   Output Parameter:
2634 - sf    - The star forest communication context encapsulating the defined mapping
2635 
2636   Note: The incoming label is a receiver mapping of remote points to their parent rank.
2637 
2638   Level: developer
2639 
2640 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
2641 @*/
2642 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
2643 {
2644   PetscMPIInt     rank, size;
2645   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
2646   PetscSFNode    *remotePoints;
2647   IS              remoteRootIS;
2648   const PetscInt *remoteRoots;
2649   PetscErrorCode ierr;
2650 
2651   PetscFunctionBegin;
2652   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
2653   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
2654 
2655   for (numRemote = 0, n = 0; n < size; ++n) {
2656     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
2657     numRemote += numPoints;
2658   }
2659   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
2660   /* Put owned points first */
2661   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
2662   if (numPoints > 0) {
2663     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
2664     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2665     for (p = 0; p < numPoints; p++) {
2666       remotePoints[idx].index = remoteRoots[p];
2667       remotePoints[idx].rank = rank;
2668       idx++;
2669     }
2670     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2671     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
2672   }
2673   /* Now add remote points */
2674   for (n = 0; n < size; ++n) {
2675     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
2676     if (numPoints <= 0 || n == rank) continue;
2677     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
2678     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2679     for (p = 0; p < numPoints; p++) {
2680       remotePoints[idx].index = remoteRoots[p];
2681       remotePoints[idx].rank = n;
2682       idx++;
2683     }
2684     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2685     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
2686   }
2687   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
2688   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
2689   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
2690   PetscFunctionReturn(0);
2691 }
2692 
2693 /* The two functions below are used by DMPlexRebalanceSharedPoints which errors
2694  * when PETSc is built without ParMETIS. To avoid -Wunused-function, we take
2695  * them out in that case. */
2696 #if defined(PETSC_HAVE_PARMETIS)
2697 /*@C
2698 
2699   DMPlexRewriteSF - Rewrites the ownership of the SF of a DM (in place).
2700 
2701   Input parameters:
2702   + dm                - The DMPlex object.
2703   + n                 - The number of points.
2704   + pointsToRewrite   - The points in the SF whose ownership will change.
2705   + targetOwners      - New owner for each element in pointsToRewrite.
2706   + degrees           - Degrees of the points in the SF as obtained by PetscSFComputeDegreeBegin/PetscSFComputeDegreeEnd.
2707 
2708   Level: developer
2709 
2710 @*/
static PetscErrorCode DMPlexRewriteSF(DM dm, PetscInt n, PetscInt *pointsToRewrite, PetscInt *targetOwners, const PetscInt *degrees)
{
  PetscInt      ierr, pStart, pEnd, i, j, counter, leafCounter, sumDegrees, nroots, nleafs;
  PetscInt     *cumSumDegrees, *newOwners, *newNumbers, *rankOnLeafs, *locationsOfLeafs, *remoteLocalPointOfLeafs, *points, *leafsNew;
  PetscSFNode  *leafLocationsNew;
  const         PetscSFNode *iremote;
  const         PetscInt *ilocal;
  PetscBool    *isLeaf;
  PetscSF       sf;
  MPI_Comm      comm;
  PetscMPIInt   rank, size;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);

  /* Mark which chart points are leaves of the point SF (i.e. owned elsewhere) */
  ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sf, &nroots, &nleafs, &ilocal, &iremote); CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &isLeaf);CHKERRQ(ierr);
  for (i=0; i<pEnd-pStart; i++) {
    isLeaf[i] = PETSC_FALSE;
  }
  for (i=0; i<nleafs; i++) {
    isLeaf[ilocal[i]-pStart] = PETSC_TRUE;
  }

  /* Prefix sum of the root degrees: cumSumDegrees[p] is the offset of point p's
     incoming-leaf records in the gathered arrays built below */
  ierr = PetscMalloc1(pEnd-pStart+1, &cumSumDegrees);CHKERRQ(ierr);
  cumSumDegrees[0] = 0;
  for (i=1; i<=pEnd-pStart; i++) {
    cumSumDegrees[i] = cumSumDegrees[i-1] + degrees[i-1];
  }
  sumDegrees = cumSumDegrees[pEnd-pStart];
  /* get the location of my leafs (we have sumDegrees many leafs pointing at our roots) */

  ierr = PetscMalloc1(sumDegrees, &locationsOfLeafs);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &rankOnLeafs);CHKERRQ(ierr);
  for (i=0; i<pEnd-pStart; i++) {
    rankOnLeafs[i] = rank;
  }
  /* Gather, per root, the rank of every leaf copy pointing at it */
  ierr = PetscSFGatherBegin(sf, MPIU_INT, rankOnLeafs, locationsOfLeafs);CHKERRQ(ierr);
  ierr = PetscSFGatherEnd(sf, MPIU_INT, rankOnLeafs, locationsOfLeafs);CHKERRQ(ierr);
  ierr = PetscFree(rankOnLeafs);CHKERRQ(ierr);

  /* get the remote local points of my leaves */
  ierr = PetscMalloc1(sumDegrees, &remoteLocalPointOfLeafs);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &points);CHKERRQ(ierr);
  for (i=0; i<pEnd-pStart; i++) {
    points[i] = pStart+i;
  }
  /* Gather, per root, the local point number of every leaf copy on its own rank */
  ierr = PetscSFGatherBegin(sf, MPIU_INT, points, remoteLocalPointOfLeafs);CHKERRQ(ierr);
  ierr = PetscSFGatherEnd(sf, MPIU_INT, points, remoteLocalPointOfLeafs);CHKERRQ(ierr);
  ierr = PetscFree(points);CHKERRQ(ierr);
  /* Figure out the new owners of the vertices that are up for grabs and their numbers on the new owners */
  ierr = PetscMalloc1(pEnd-pStart, &newOwners);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &newNumbers);CHKERRQ(ierr);
  for (i=0; i<pEnd-pStart; i++) {
    /* -1 means the ownership of this point is not being rewritten */
    newOwners[i] = -1;
    newNumbers[i] = -1;
  }
  {
    PetscInt oldNumber, newNumber, oldOwner, newOwner;
    for (i=0; i<n; i++) {
      oldNumber = pointsToRewrite[i];
      newNumber = -1;
      oldOwner = rank;
      newOwner = targetOwners[i];
      if (oldOwner == newOwner) {
        newNumber = oldNumber;
      } else {
        /* Search the gathered leaf records for the copy of this point on the new owner */
        for (j=0; j<degrees[oldNumber]; j++) {
          if (locationsOfLeafs[cumSumDegrees[oldNumber]+j] == newOwner) {
            newNumber = remoteLocalPointOfLeafs[cumSumDegrees[oldNumber]+j];
            break;
          }
        }
      }
      if (newNumber == -1) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Couldn't find the new owner of vertex.");

      newOwners[oldNumber] = newOwner;
      newNumbers[oldNumber] = newNumber;
    }
  }
  ierr = PetscFree(cumSumDegrees);CHKERRQ(ierr);
  ierr = PetscFree(locationsOfLeafs);CHKERRQ(ierr);
  ierr = PetscFree(remoteLocalPointOfLeafs);CHKERRQ(ierr);

  /* Broadcast the ownership decisions from the roots to every leaf copy */
  ierr = PetscSFBcastBegin(sf, MPIU_INT, newOwners, newOwners);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf, MPIU_INT, newOwners, newOwners);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf, MPIU_INT, newNumbers, newNumbers);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf, MPIU_INT, newNumbers, newNumbers);CHKERRQ(ierr);

  /* Now count how many leafs we have on each processor. */
  leafCounter=0;
  for (i=0; i<pEnd-pStart; i++) {
    if (newOwners[i] >= 0) {
      /* rewritten point: leaf iff it is now owned by another rank */
      if (newOwners[i] != rank) {
        leafCounter++;
      }
    } else {
      /* untouched point: keep its old leaf status */
      if (isLeaf[i]) {
        leafCounter++;
      }
    }
  }

  /* Now set up the new sf by creating the leaf arrays */
  ierr = PetscMalloc1(leafCounter, &leafsNew);CHKERRQ(ierr);
  ierr = PetscMalloc1(leafCounter, &leafLocationsNew);CHKERRQ(ierr);

  leafCounter = 0;
  counter = 0;
  for (i=0; i<pEnd-pStart; i++) {
    if (newOwners[i] >= 0) {
      if (newOwners[i] != rank) {
        /* rewritten leaf: point at the new owner's local copy */
        leafsNew[leafCounter] = i;
        leafLocationsNew[leafCounter].rank = newOwners[i];
        leafLocationsNew[leafCounter].index = newNumbers[i];
        leafCounter++;
      }
    } else {
      if (isLeaf[i]) {
        /* untouched leaf: keep its previous remote location */
        leafsNew[leafCounter] = i;
        leafLocationsNew[leafCounter].rank = iremote[counter].rank;
        leafLocationsNew[leafCounter].index = iremote[counter].index;
        leafCounter++;
      }
    }
    /* counter walks the old iremote array in step with the old leaves */
    if (isLeaf[i]) {
      counter++;
    }
  }

  /* Replace the graph of the point SF in place; arrays are owned by the SF now */
  ierr = PetscSFSetGraph(sf, nroots, leafCounter, leafsNew, PETSC_OWN_POINTER, leafLocationsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscFree(newOwners);CHKERRQ(ierr);
  ierr = PetscFree(newNumbers);CHKERRQ(ierr);
  ierr = PetscFree(isLeaf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2851 
2852 static PetscErrorCode DMPlexViewDistribution(MPI_Comm comm, PetscInt n, PetscInt skip, PetscInt *vtxwgt, PetscInt *part, PetscViewer viewer)
2853 {
2854   PetscInt *distribution, min, max, sum, i, ierr;
2855   PetscMPIInt rank, size;
2856   PetscFunctionBegin;
2857   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2858   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2859   ierr = PetscCalloc1(size, &distribution);CHKERRQ(ierr);
2860   for (i=0; i<n; i++) {
2861     if (part) distribution[part[i]] += vtxwgt[skip*i];
2862     else distribution[rank] += vtxwgt[skip*i];
2863   }
2864   ierr = MPI_Allreduce(MPI_IN_PLACE, distribution, size, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
2865   min = distribution[0];
2866   max = distribution[0];
2867   sum = distribution[0];
2868   for (i=1; i<size; i++) {
2869     if (distribution[i]<min) min=distribution[i];
2870     if (distribution[i]>max) max=distribution[i];
2871     sum += distribution[i];
2872   }
2873   ierr = PetscViewerASCIIPrintf(viewer, "Min: %D, Avg: %D, Max: %D, Balance: %f\n", min, sum/size, max, (max*1.*size)/sum);CHKERRQ(ierr);
2874   ierr = PetscFree(distribution);CHKERRQ(ierr);
2875   PetscFunctionReturn(0);
2876 }
2877 
2878 #endif
2879 
2880 /*@
  DMPlexRebalanceSharedPoints - Redistribute points in the plex that are shared in order to achieve better balancing. This routine updates the PointSF of the DM in place.
2882 
2883   Input parameters:
2884   + dm               - The DMPlex object.
2885   + entityDepth      - depth of the entity to balance (0 -> balance vertices).
2886   + useInitialGuess  - whether to use the current distribution as initial guess (only used by ParMETIS).
2887   + parallel         - whether to use ParMETIS and do the partition in parallel or whether to gather the graph onto a single process and use METIS.
2888 
2889   Output parameters:
2890   + success          - whether the graph partitioning was successful or not. If not, try useInitialGuess=True and parallel=True.
2891 
2892   Level: user
2893 
2894 @*/
2895 
/* Rebalance the mesh points of depth entityDepth that are shared between ranks.
 *
 * Outline:
 *   1. Classify every chart point as exclusively owned (owned here, seen only
 *      here), non-exclusively owned (owned here, ghosted elsewhere), or leaf
 *      (owned elsewhere).
 *   2. Build an auxiliary graph with one "super-vertex" per rank (standing in
 *      for all of that rank's exclusively owned points) plus one vertex per
 *      non-exclusively owned point; edges connect each rank's super-vertex to
 *      the shared points it sees. The graph is assembled as a symmetric
 *      MATMPIAIJ matrix (via a MATPREALLOCATOR pass).
 *   3. Partition the graph with ParMETIS (parallel==PETSC_TRUE) or with METIS
 *      on a gathered redundant copy, using ncon==2 balance constraints.
 *   4. Renumber the parts so every rank keeps its own super-vertex, repair
 *      vertices assigned to ranks they are not connected to, and rewrite the
 *      PointSF to reflect the new ownership. */
PetscErrorCode DMPlexRebalanceSharedPoints(DM dm, PetscInt entityDepth, PetscBool useInitialGuess, PetscBool parallel, PetscBool *success)
{
#if defined(PETSC_HAVE_PARMETIS)
  PetscSF     sf;
  /* ierr is deliberately PetscInt rather than PetscErrorCode: it also receives
   * (Par)METIS status codes that are compared against METIS_OK below. */
  PetscInt    ierr, i, j, idx, jdx;
  PetscInt    eBegin, eEnd, nroots, nleafs, pStart, pEnd;
  const       PetscInt *degrees, *ilocal;
  const       PetscSFNode *iremote;
  PetscBool   *toBalance, *isLeaf, *isExclusivelyOwned, *isNonExclusivelyOwned;
  PetscInt    numExclusivelyOwned, numNonExclusivelyOwned;
  PetscMPIInt rank, size;
  PetscInt    *globalNumbersOfLocalOwnedVertices, *leafGlobalNumbers;
  const       PetscInt *cumSumVertices;   /* rank offsets into the auxiliary graph's vertex numbering */
  PetscInt    offset, counter;
  PetscInt    lenadjncy;
  PetscInt    *xadj, *adjncy, *vtxwgt;    /* CSR adjacency + vertex weights handed to (Par)METIS */
  PetscInt    lenxadj;
  PetscInt    *adjwgt = NULL;             /* no edge weights */
  PetscInt    *part, *options;
  PetscInt    nparts, wgtflag, numflag, ncon, edgecut;
  real_t      *ubvec;
  PetscInt    *firstVertices, *renumbering;
  PetscInt    failed, failedGlobal;
  MPI_Comm    comm;
  Mat         A, Apre;
  const char *prefix = NULL;
  PetscViewer       viewer;
  PetscViewerFormat format;
  PetscLayout layout;

  PetscFunctionBegin;
  if (success) *success = PETSC_FALSE;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  /* Nothing shared on one rank. */
  if (size==1) PetscFunctionReturn(0);

  ierr = PetscLogEventBegin(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);

  ierr = PetscOptionsGetViewer(comm,((PetscObject)dm)->options, prefix,"-dm_rebalance_partition_view",&viewer,&format,NULL);CHKERRQ(ierr);
  if (viewer) {
    ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr);
  }

  /* Figure out all points in the plex that we are interested in balancing. */
  ierr = DMPlexGetDepthStratum(dm, entityDepth, &eBegin, &eEnd);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &toBalance);CHKERRQ(ierr);

  for (i=0; i<pEnd-pStart; i++) {
    /* NOTE(review): point = i + pStart, so this test is only correct when
     * pStart == 0 (which DMPlexGetChart returns in practice) — confirm. */
    toBalance[i] = (PetscBool)(i-pStart>=eBegin && i-pStart<eEnd);
  }

  /* There are three types of points:
   * exclusivelyOwned: points that are owned by this process and only seen by this process
   * nonExclusivelyOwned: points that are owned by this process but seen by at least another process
   * leaf: a point that is seen by this process but owned by a different process
   */
  ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sf, &nroots, &nleafs, &ilocal, &iremote); CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &isLeaf);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &isNonExclusivelyOwned);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &isExclusivelyOwned);CHKERRQ(ierr);
  for (i=0; i<pEnd-pStart; i++) {
    isNonExclusivelyOwned[i] = PETSC_FALSE;
    isExclusivelyOwned[i] = PETSC_FALSE;
    isLeaf[i] = PETSC_FALSE;
  }

  /* start by marking all the leafs */
  for (i=0; i<nleafs; i++) {
    isLeaf[ilocal[i]-pStart] = PETSC_TRUE;
  }

  /* for an owned point, we can figure out whether another processor sees it or
   * not by calculating its degree */
  ierr = PetscSFComputeDegreeBegin(sf, &degrees);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeEnd(sf, &degrees);CHKERRQ(ierr);

  numExclusivelyOwned = 0;
  numNonExclusivelyOwned = 0;
  for (i=0; i<pEnd-pStart; i++) {
    if (toBalance[i]) {
      if (degrees[i] > 0) {
        /* degree > 0: some other rank has this root as a leaf. */
        isNonExclusivelyOwned[i] = PETSC_TRUE;
        numNonExclusivelyOwned += 1;
      } else {
        if (!isLeaf[i]) {
          /* Neither ghosted elsewhere nor a leaf here: purely local. */
          isExclusivelyOwned[i] = PETSC_TRUE;
          numExclusivelyOwned += 1;
        }
      }
    }
  }

  /* We are going to build a graph with one vertex per core representing the
   * exclusively owned points and then one vertex per nonExclusively owned
   * point. */

  /* Vertex cumSumVertices[r] is rank r's super-vertex; the following
   * numNonExclusivelyOwned vertices are its shared points. */
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, 1 + numNonExclusivelyOwned);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &cumSumVertices);CHKERRQ(ierr);

  ierr = PetscMalloc1(pEnd-pStart, &globalNumbersOfLocalOwnedVertices);CHKERRQ(ierr);
  offset = cumSumVertices[rank];
  counter = 0;
  for (i=0; i<pEnd-pStart; i++) {
    if (toBalance[i]) {
      if (degrees[i] > 0) {
        /* "+ 1" skips this rank's super-vertex at position offset. */
        globalNumbersOfLocalOwnedVertices[i] = counter + 1 + offset;
        counter++;
      }
    }
  }

  /* send the global numbers of vertices I own to the leafs so that they know to connect to it */
  ierr = PetscMalloc1(pEnd-pStart, &leafGlobalNumbers);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf, MPIU_INT, globalNumbersOfLocalOwnedVertices, leafGlobalNumbers);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf, MPIU_INT, globalNumbersOfLocalOwnedVertices, leafGlobalNumbers);CHKERRQ(ierr);

  /* Now start building the data structure for ParMETIS */

  /* First pass with MATPREALLOCATOR just to count nonzeros ... */
  ierr = MatCreate(comm, &Apre);CHKERRQ(ierr);
  ierr = MatSetType(Apre, MATPREALLOCATOR);CHKERRQ(ierr);
  ierr = MatSetSizes(Apre, 1+numNonExclusivelyOwned, 1+numNonExclusivelyOwned, cumSumVertices[size], cumSumVertices[size]);CHKERRQ(ierr);
  ierr = MatSetUp(Apre);CHKERRQ(ierr);

  for (i=0; i<pEnd-pStart; i++) {
    if (toBalance[i]) {
      /* idx is this rank's super-vertex; connect it (symmetrically) to every
       * shared point this rank sees, owned or not. */
      idx = cumSumVertices[rank];
      if (isNonExclusivelyOwned[i]) jdx = globalNumbersOfLocalOwnedVertices[i];
      else if (isLeaf[i]) jdx = leafGlobalNumbers[i];
      else continue;
      ierr = MatSetValue(Apre, idx, jdx, 1, INSERT_VALUES);CHKERRQ(ierr);
      ierr = MatSetValue(Apre, jdx, idx, 1, INSERT_VALUES);CHKERRQ(ierr);
    }
  }

  ierr = MatAssemblyBegin(Apre, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(Apre, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  /* ... then the real AIJ matrix with exact preallocation. */
  ierr = MatCreate(comm, &A);CHKERRQ(ierr);
  ierr = MatSetType(A, MATMPIAIJ);CHKERRQ(ierr);
  ierr = MatSetSizes(A, 1+numNonExclusivelyOwned, 1+numNonExclusivelyOwned, cumSumVertices[size], cumSumVertices[size]);CHKERRQ(ierr);
  ierr = MatPreallocatorPreallocate(Apre, PETSC_FALSE, A);CHKERRQ(ierr);
  ierr = MatDestroy(&Apre);CHKERRQ(ierr);

  for (i=0; i<pEnd-pStart; i++) {
    if (toBalance[i]) {
      idx = cumSumVertices[rank];
      if (isNonExclusivelyOwned[i]) jdx = globalNumbersOfLocalOwnedVertices[i];
      else if (isLeaf[i]) jdx = leafGlobalNumbers[i];
      else continue;
      ierr = MatSetValue(A, idx, jdx, 1, INSERT_VALUES);CHKERRQ(ierr);
      ierr = MatSetValue(A, jdx, idx, 1, INSERT_VALUES);CHKERRQ(ierr);
    }
  }

  ierr = MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = PetscFree(leafGlobalNumbers);CHKERRQ(ierr);
  ierr = PetscFree(globalNumbersOfLocalOwnedVertices);CHKERRQ(ierr);

  nparts = size;
  wgtflag = 2; /* ParMETIS: weights on vertices only (adjwgt == NULL) */
  numflag = 0; /* C-style 0-based numbering */
  ncon = 2;    /* two balance constraints per vertex, see vtxwgt below */
  real_t *tpwgts;
  ierr = PetscMalloc1(ncon * nparts, &tpwgts);CHKERRQ(ierr);
  /* Equal target weight for every (part, constraint) pair. */
  for (i=0; i<ncon*nparts; i++) {
    tpwgts[i] = 1./(nparts);
  }

  /* Allowed imbalance per constraint: 1%. */
  ierr = PetscMalloc1(ncon, &ubvec);CHKERRQ(ierr);
  ubvec[0] = 1.01;
  ubvec[1] = 1.01;
  /* Extract the local rows of A into CSR form for (Par)METIS. */
  lenadjncy = 0;
  for (i=0; i<1+numNonExclusivelyOwned; i++) {
    PetscInt temp=0;
    ierr = MatGetRow(A, cumSumVertices[rank] + i, &temp, NULL, NULL);CHKERRQ(ierr);
    lenadjncy += temp;
    ierr = MatRestoreRow(A, cumSumVertices[rank] + i, &temp, NULL, NULL);CHKERRQ(ierr);
  }
  ierr = PetscMalloc1(lenadjncy, &adjncy);CHKERRQ(ierr);
  lenxadj = 2 + numNonExclusivelyOwned;
  ierr = PetscMalloc1(lenxadj, &xadj);CHKERRQ(ierr);
  xadj[0] = 0;
  counter = 0;
  for (i=0; i<1+numNonExclusivelyOwned; i++) {
    PetscInt        temp=0;
    const PetscInt *cols;
    ierr = MatGetRow(A, cumSumVertices[rank] + i, &temp, &cols, NULL);CHKERRQ(ierr);
    ierr = PetscMemcpy(&adjncy[counter], cols, temp*sizeof(PetscInt));CHKERRQ(ierr);
    counter += temp;
    xadj[i+1] = counter;
    ierr = MatRestoreRow(A, cumSumVertices[rank] + i, &temp, &cols, NULL);CHKERRQ(ierr);
  }

  ierr = PetscMalloc1(cumSumVertices[rank+1]-cumSumVertices[rank], &part);CHKERRQ(ierr);
  ierr = PetscMalloc1(ncon*(1 + numNonExclusivelyOwned), &vtxwgt);CHKERRQ(ierr);
  /* Constraint 0 balances the number of owned entities: the super-vertex
   * carries all exclusively owned points, each shared point weighs 1.
   * Constraint 1 puts weight 1 only on super-vertices so each part ends up
   * with exactly one rank's super-vertex. */
  vtxwgt[0] = numExclusivelyOwned;
  if (ncon>1) vtxwgt[1] = 1;
  for (i=0; i<numNonExclusivelyOwned; i++) {
    vtxwgt[ncon*(i+1)] = 1;
    if (ncon>1) vtxwgt[ncon*(i+1)+1] = 0;
  }

  if (viewer) {
    ierr = PetscViewerASCIIPrintf(viewer, "Attempt rebalancing of shared points of depth %D on interface of mesh distribution.\n", entityDepth);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer, "Size of generated auxiliary graph: %D\n", cumSumVertices[size]);CHKERRQ(ierr);
  }
  if (parallel) {
    ierr = PetscMalloc1(4, &options);CHKERRQ(ierr);
    options[0] = 1; /* Use the options array */
    options[1] = 0; /* Verbosity */
    options[2] = 0; /* Seed */
    options[3] = PARMETIS_PSR_COUPLED; /* Sub-domains and processes are coupled (nparts == size) */
    if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using ParMETIS to partition graph.\n");CHKERRQ(ierr); }
    if (useInitialGuess) {
      if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using current distribution of points as initial guess.\n");CHKERRQ(ierr); }
      PetscStackPush("ParMETIS_V3_RefineKway");
      ierr = ParMETIS_V3_RefineKway((PetscInt*)cumSumVertices, xadj, adjncy, vtxwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgecut, part, &comm);
      if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_RefineKway()");
      PetscStackPop;
    } else {
      PetscStackPush("ParMETIS_V3_PartKway");
      ierr = ParMETIS_V3_PartKway((PetscInt*)cumSumVertices, xadj, adjncy, vtxwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgecut, part, &comm);
      PetscStackPop;
      if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
    }
    ierr = PetscFree(options);CHKERRQ(ierr);
  } else {
    if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using METIS to partition graph.\n");CHKERRQ(ierr); }
    Mat As;
    PetscInt numRows;
    PetscInt *partGlobal;
    /* Gather the whole graph redundantly; only rank 0 runs METIS. */
    ierr = MatCreateRedundantMatrix(A, size, MPI_COMM_NULL, MAT_INITIAL_MATRIX, &As);CHKERRQ(ierr);

    PetscInt *numExclusivelyOwnedAll;
    ierr = PetscMalloc1(size, &numExclusivelyOwnedAll);CHKERRQ(ierr);
    numExclusivelyOwnedAll[rank] = numExclusivelyOwned;
    ierr = MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,numExclusivelyOwnedAll,1,MPIU_INT,comm);CHKERRQ(ierr);

    ierr = MatGetSize(As, &numRows, NULL);CHKERRQ(ierr);
    ierr = PetscMalloc1(numRows, &partGlobal);CHKERRQ(ierr);
    if (!rank) {
      PetscInt *adjncy_g, *xadj_g, *vtxwgt_g;
      lenadjncy = 0;

      for (i=0; i<numRows; i++) {
        PetscInt temp=0;
        ierr = MatGetRow(As, i, &temp, NULL, NULL);CHKERRQ(ierr);
        lenadjncy += temp;
        ierr = MatRestoreRow(As, i, &temp, NULL, NULL);CHKERRQ(ierr);
      }
      ierr = PetscMalloc1(lenadjncy, &adjncy_g);CHKERRQ(ierr);
      lenxadj = 1 + numRows;
      ierr = PetscMalloc1(lenxadj, &xadj_g);CHKERRQ(ierr);
      xadj_g[0] = 0;
      counter = 0;
      for (i=0; i<numRows; i++) {
        PetscInt        temp=0;
        const PetscInt *cols;
        ierr = MatGetRow(As, i, &temp, &cols, NULL);CHKERRQ(ierr);
        ierr = PetscMemcpy(&adjncy_g[counter], cols, temp*sizeof(PetscInt));CHKERRQ(ierr);
        counter += temp;
        xadj_g[i+1] = counter;
        ierr = MatRestoreRow(As, i, &temp, &cols, NULL);CHKERRQ(ierr);
      }
      /* Global vertex weights, same two constraints as in the parallel path. */
      ierr = PetscMalloc1(2*numRows, &vtxwgt_g);CHKERRQ(ierr);
      for (i=0; i<size; i++){
        vtxwgt_g[ncon*cumSumVertices[i]] = numExclusivelyOwnedAll[i];
        if (ncon>1) vtxwgt_g[ncon*cumSumVertices[i]+1] = 1;
        for (j=cumSumVertices[i]+1; j<cumSumVertices[i+1]; j++) {
          vtxwgt_g[ncon*j] = 1;
          /* NOTE(review): literal 2 here relies on ncon == 2 (set above),
           * while the line before uses ncon — same value, inconsistent style. */
          if (ncon>1) vtxwgt_g[2*j+1] = 0;
        }
      }
      ierr = PetscMalloc1(64, &options);CHKERRQ(ierr); /* METIS_NOPTIONS == 40; 64 is a safe upper bound */
      ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
      if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
      options[METIS_OPTION_CONTIG] = 1; /* force contiguous partitions */
      PetscStackPush("METIS_PartGraphKway");
      ierr = METIS_PartGraphKway(&numRows, &ncon, xadj_g, adjncy_g, vtxwgt_g, NULL, NULL, &nparts, tpwgts, ubvec, options, &edgecut, partGlobal);
      PetscStackPop;
      if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
      ierr = PetscFree(options);CHKERRQ(ierr);
      ierr = PetscFree(xadj_g);CHKERRQ(ierr);
      ierr = PetscFree(adjncy_g);CHKERRQ(ierr);
      ierr = PetscFree(vtxwgt_g);CHKERRQ(ierr);
    }
    ierr = PetscFree(numExclusivelyOwnedAll);CHKERRQ(ierr);

    /* Now scatter the parts array. */
    {
      PetscMPIInt *counts, *mpiCumSumVertices;
      ierr = PetscMalloc1(size, &counts);CHKERRQ(ierr);
      ierr = PetscMalloc1(size+1, &mpiCumSumVertices);CHKERRQ(ierr);
      for(i=0; i<size; i++) {
        ierr = PetscMPIIntCast(cumSumVertices[i+1] - cumSumVertices[i], &(counts[i]));CHKERRQ(ierr);
      }
      for(i=0; i<=size; i++) {
        ierr = PetscMPIIntCast(cumSumVertices[i], &(mpiCumSumVertices[i]));CHKERRQ(ierr);
      }
      ierr = MPI_Scatterv(partGlobal, counts, mpiCumSumVertices, MPIU_INT, part, counts[rank], MPIU_INT, 0, comm);CHKERRQ(ierr);
      ierr = PetscFree(counts);CHKERRQ(ierr);
      ierr = PetscFree(mpiCumSumVertices);CHKERRQ(ierr);
    }

    ierr = PetscFree(partGlobal);CHKERRQ(ierr);
    ierr = MatDestroy(&As);CHKERRQ(ierr);
  }

  ierr = MatDestroy(&A);CHKERRQ(ierr);
  ierr = PetscFree(ubvec);CHKERRQ(ierr);
  ierr = PetscFree(tpwgts);CHKERRQ(ierr);

  /* Now rename the result so that the vertex resembling the exclusively owned points stays on the same rank */

  /* part[0] is the part this rank's super-vertex landed in; map that part
   * number back to this rank so exclusively owned points never move. */
  ierr = PetscMalloc1(size, &firstVertices);CHKERRQ(ierr);
  ierr = PetscMalloc1(size, &renumbering);CHKERRQ(ierr);
  firstVertices[rank] = part[0];
  ierr = MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,firstVertices,1,MPIU_INT,comm);CHKERRQ(ierr);
  for (i=0; i<size; i++) {
    renumbering[firstVertices[i]] = i;
  }
  for (i=0; i<cumSumVertices[rank+1]-cumSumVertices[rank]; i++) {
    part[i] = renumbering[part[i]];
  }
  /* Check if the renumbering worked (this can fail when ParMETIS gives fewer partitions than there are processes) */
  failed = (PetscInt)(part[0] != rank);
  ierr = MPI_Allreduce(&failed, &failedGlobal, 1, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);

  ierr = PetscFree(firstVertices);CHKERRQ(ierr);
  ierr = PetscFree(renumbering);CHKERRQ(ierr);

  if (failedGlobal > 0) {
    /* Bail out without touching the SF; *success stays PETSC_FALSE. */
    ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
    ierr = PetscFree(xadj);CHKERRQ(ierr);
    ierr = PetscFree(adjncy);CHKERRQ(ierr);
    ierr = PetscFree(vtxwgt);CHKERRQ(ierr);
    ierr = PetscFree(toBalance);CHKERRQ(ierr);
    ierr = PetscFree(isLeaf);CHKERRQ(ierr);
    ierr = PetscFree(isNonExclusivelyOwned);CHKERRQ(ierr);
    ierr = PetscFree(isExclusivelyOwned);CHKERRQ(ierr);
    ierr = PetscFree(part);CHKERRQ(ierr);
    if (viewer) {
      ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
      ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
    }
    ierr = PetscLogEventEnd(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /*Let's check how well we did distributing points*/
  if (viewer) {
    ierr = PetscViewerASCIIPrintf(viewer, "Comparing number of owned entities of depth %D on each process before rebalancing, after rebalancing, and after consistency checks.\n", entityDepth);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer, "Initial.     ");CHKERRQ(ierr);
    ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, NULL, viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer, "Rebalanced.  ");CHKERRQ(ierr);
    ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, part, viewer);CHKERRQ(ierr);
  }

  /* Now check that every vertex is owned by a process that it is actually connected to. */
  for (i=1; i<=numNonExclusivelyOwned; i++) {
    PetscInt loc = 0;
    /* Binary search of the target rank's super-vertex in this vertex's
     * adjacency; assumes MatGetRow returned sorted column indices — which it
     * does for AIJ — so PetscFindInt is applicable. */
    ierr = PetscFindInt(cumSumVertices[part[i]], xadj[i+1]-xadj[i], &adjncy[xadj[i]], &loc);CHKERRQ(ierr);
    /* If not, then just set the owner to the original owner (hopefully a rare event, it means that a vertex has been isolated) */
    if (loc<0) {
      part[i] = rank;
    }
  }

  /* Let's see how significant the influences of the previous fixing up step was.*/
  if (viewer) {
    ierr = PetscViewerASCIIPrintf(viewer, "After.       ");CHKERRQ(ierr);
    ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, part, viewer);CHKERRQ(ierr);
  }

  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  ierr = PetscFree(xadj);CHKERRQ(ierr);
  ierr = PetscFree(adjncy);CHKERRQ(ierr);
  ierr = PetscFree(vtxwgt);CHKERRQ(ierr);

  /* Almost done, now rewrite the SF to reflect the new ownership. */
  {
    PetscInt *pointsToRewrite;
    ierr = PetscMalloc1(numNonExclusivelyOwned, &pointsToRewrite);CHKERRQ(ierr);
    counter = 0;
    for(i=0; i<pEnd-pStart; i++) {
      if (toBalance[i]) {
        if (isNonExclusivelyOwned[i]) {
          pointsToRewrite[counter] = i + pStart;
          counter++;
        }
      }
    }
    /* part+1 skips the super-vertex entry; entries line up with pointsToRewrite. */
    ierr = DMPlexRewriteSF(dm, numNonExclusivelyOwned, pointsToRewrite, part+1, degrees);CHKERRQ(ierr);
    ierr = PetscFree(pointsToRewrite);CHKERRQ(ierr);
  }

  ierr = PetscFree(toBalance);CHKERRQ(ierr);
  ierr = PetscFree(isLeaf);CHKERRQ(ierr);
  ierr = PetscFree(isNonExclusivelyOwned);CHKERRQ(ierr);
  ierr = PetscFree(isExclusivelyOwned);CHKERRQ(ierr);
  ierr = PetscFree(part);CHKERRQ(ierr);
  if (viewer) {
    ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
    ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  }
  if (success) *success = PETSC_TRUE;
  ierr = PetscLogEventEnd(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
#else
  SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
#endif
  PetscFunctionReturn(0);
}
3314