xref: /petsc/src/dm/impls/plex/plexpartition.c (revision a9e32f4682ae4d747765e77b929de055d4d488f7)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/hashseti.h>
3 
/* Class id for PetscPartitioner objects; assigned at registration time (0 until then) */
PetscClassId PETSCPARTITIONER_CLASSID = 0;

/* Registry of available PetscPartitioner implementations, filled by PetscPartitionerRegister() */
PetscFunctionList PetscPartitionerList              = NULL;
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;

/* Flag recording whether the Chaco citation below has been registered -- presumably
   consumed by PetscCitationsRegister(); confirm against the Chaco partitioner code */
PetscBool ChacoPartitionercite = PETSC_FALSE;
/* BibTeX entry for the Chaco partitioner.  Every field line must end in "\n"
   so the emitted citation is one field per line; the booktitle line was
   previously missing its newline. */
const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
                               "  author    = {Bruce Hendrickson and Robert Leland},\n"
                               "  title     = {A multilevel algorithm for partitioning graphs},\n"
                               "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},\n"
                               "  isbn      = {0-89791-816-9},\n"
                               "  pages     = {28},\n"
                               "  doi       = {https://doi.acm.org/10.1145/224170.224228},\n"
                               "  publisher = {ACM Press},\n"
                               "  address   = {New York},\n"
                               "  year      = {1995}\n}\n";
20 
/* Flag recording whether the ParMETIS citation below has been registered -- presumably
   consumed by PetscCitationsRegister(); confirm against the ParMETIS partitioner code */
PetscBool ParMetisPartitionercite = PETSC_FALSE;
/* BibTeX entry for ParMETIS, one field per line */
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";
29 
/* Flag recording whether the PT-Scotch citation below has been registered -- presumably
   consumed by PetscCitationsRegister(); confirm against the PT-Scotch partitioner code */
PetscBool PTScotchPartitionercite = PETSC_FALSE;
/* BibTeX entry for PT-Scotch, one field per line */
const char PTScotchPartitionerCitation[] =
  "@article{PTSCOTCH,\n"
  "  author  = {C. Chevalier and F. Pellegrini},\n"
  "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
  "  journal = {Parallel Computing},\n"
  "  volume  = {34},\n"
  "  number  = {6},\n"
  "  pages   = {318--331},\n"
  "  year    = {2008},\n"
  "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
  "}\n";
42 
43 
44 PETSC_STATIC_INLINE PetscInt DMPlex_GlobalID(PetscInt point) { return point >= 0 ? point : -(point+1); }
45 
/* DMPlexCreatePartitionerGraph_Native - Build the cell-cell adjacency graph in CSR form
   by walking the mesh directly, using the point SF to pick up adjacency across process
   boundaries.  Output arrays (offsets, adjacency) are malloc'd for the caller; the
   optional globalNumbering IS must be destroyed by the caller.  Cells not owned by
   this rank are skipped, so the graph has no parallel overlap (as ParMetis expects). */
static PetscErrorCode DMPlexCreatePartitionerGraph_Native(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
{
  PetscInt       dim, depth, p, pStart, pEnd, a, adjSize, idx, size;
  PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
  IS             cellNumbering;
  const PetscInt *cellNum;
  PetscBool      useCone, useClosure;
  PetscSection   section;
  PetscSegBuffer adjBuffer;
  PetscSF        sfPoint;
  PetscInt       *adjCells = NULL, *remoteCells = NULL;
  const PetscInt *local;
  PetscInt       nroots, nleaves, l;
  PetscMPIInt    rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  if (dim != depth) {
    /* We do not handle the uninterpolated case here */
    ierr = DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);CHKERRQ(ierr);
    /* DMPlexCreateNeighborCSR does not make a numbering */
    if (globalNumbering) {ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);CHKERRQ(ierr);}
    /* Different behavior for empty graphs */
    if (!*numVertices) {
      ierr = PetscMalloc1(1, offsets);CHKERRQ(ierr);
      (*offsets)[0] = 0;
    }
    /* Broken in parallel */
    if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported");
    PetscFunctionReturn(0);
  }
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
  /* Build adjacency graph via a section/segbuffer */
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
  /* Always use FVM adjacency to create partitioner graph */
  ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
  ierr = DMSetBasicAdjacency(dm, PETSC_TRUE, PETSC_FALSE);CHKERRQ(ierr);
  ierr = DMPlexCreateNumbering_Plex(dm, pStart, pEnd, 0, NULL, sfPoint, &cellNumbering);CHKERRQ(ierr);
  if (globalNumbering) {
    /* Hand a second reference to the caller; we keep using cellNumbering locally */
    ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
    *globalNumbering = cellNumbering;
  }
  ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  /* For all boundary faces (including faces adjacent to a ghost cell), record the local cell in adjCells
     Broadcast adjCells to remoteCells (to get cells from roots) and Reduce adjCells to remoteCells (to get cells from leaves)
   */
  ierr = PetscSFGetGraph(sfPoint, &nroots, &nleaves, &local, NULL);CHKERRQ(ierr);
  if (nroots >= 0) {
    PetscInt fStart, fEnd, f;

    ierr = PetscCalloc2(nroots, &adjCells, nroots, &remoteCells);CHKERRQ(ierr);
    ierr = DMPlexGetHeightStratum(dm, height+1, &fStart, &fEnd);CHKERRQ(ierr);
    /* -3 marks points with no recorded adjacent cell (so they lose the MPI_MAX reduce below) */
    for (l = 0; l < nroots; ++l) adjCells[l] = -3;
    for (f = fStart; f < fEnd; ++f) {
      const PetscInt *support;
      PetscInt        supportSize;

      ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
      ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
      /* Boundary face: record its single cell.  Interior face whose neighbor is a
         leaf (ghost) point: record the cell on the other side, to be exchanged */
      if (supportSize == 1) adjCells[f] = DMPlex_GlobalID(cellNum[support[0]]);
      else if (supportSize == 2) {
        ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = DMPlex_GlobalID(cellNum[support[1]]);
        ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = DMPlex_GlobalID(cellNum[support[0]]);
      }
      /* Handle non-conforming meshes */
      if (supportSize > 2) {
        PetscInt        numChildren, i;
        const PetscInt *children;

        ierr = DMPlexGetTreeChildren(dm, f, &numChildren, &children);CHKERRQ(ierr);
        for (i = 0; i < numChildren; ++i) {
          const PetscInt child = children[i];
          if (fStart <= child && child < fEnd) {
            ierr = DMPlexGetSupport(dm, child, &support);CHKERRQ(ierr);
            ierr = DMPlexGetSupportSize(dm, child, &supportSize);CHKERRQ(ierr);
            if (supportSize == 1) adjCells[child] = DMPlex_GlobalID(cellNum[support[0]]);
            else if (supportSize == 2) {
              ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr);
              if (p >= 0) adjCells[child] = DMPlex_GlobalID(cellNum[support[1]]);
              ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr);
              if (p >= 0) adjCells[child] = DMPlex_GlobalID(cellNum[support[0]]);
            }
          }
        }
      }
    }
    /* Bcast pushes root-side cells to leaves; Reduce(MPI_MAX) then pulls leaf-side
       cells back onto roots, overwriting the -1 sentinel wherever a cell exists */
    for (l = 0; l < nroots; ++l) remoteCells[l] = -1;
    ierr = PetscSFBcastBegin(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
  }
  /* Combine local and global adjacencies */
  for (*numVertices = 0, p = pStart; p < pEnd; p++) {
    /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    /* Add remote cells */
    if (remoteCells) {
      const PetscInt gp = DMPlex_GlobalID(cellNum[p]);
      PetscInt       coneSize, numChildren, c, i;
      const PetscInt *cone, *children;

      ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
      ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
      for (c = 0; c < coneSize; ++c) {
        const PetscInt point = cone[c];
        /* A valid remote cell that is not this cell itself is a graph neighbor */
        if (remoteCells[point] >= 0 && remoteCells[point] != gp) {
          PetscInt *PETSC_RESTRICT pBuf;
          ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
          ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
          *pBuf = remoteCells[point];
        }
        /* Handle non-conforming meshes */
        ierr = DMPlexGetTreeChildren(dm, point, &numChildren, &children);CHKERRQ(ierr);
        for (i = 0; i < numChildren; ++i) {
          const PetscInt child = children[i];
          if (remoteCells[child] >= 0 && remoteCells[child] != gp) {
            PetscInt *PETSC_RESTRICT pBuf;
            ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
            ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
            *pBuf = remoteCells[child];
          }
        }
      }
    }
    /* Add local cells */
    adjSize = PETSC_DETERMINE;
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      const PetscInt point = adj[a];
      if (point != p && pStart <= point && point < pEnd) {
        PetscInt *PETSC_RESTRICT pBuf;
        ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
        ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
        *pBuf = DMPlex_GlobalID(cellNum[point]);
      }
    }
    (*numVertices)++;
  }
  ierr = PetscFree(adj);CHKERRQ(ierr);
  ierr = PetscFree2(adjCells, remoteCells);CHKERRQ(ierr);
  /* Restore the adjacency mode we overrode above */
  ierr = DMSetBasicAdjacency(dm, useCone, useClosure);CHKERRQ(ierr);

  /* Derive CSR graph from section/segbuffer */
  ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
  for (idx = 0, p = pStart; p < pEnd; p++) {
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
  }
  vOffsets[*numVertices] = size;
  ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);

  if (nroots >= 0) {
    /* Filter out duplicate edges using section/segbuffer */
    ierr = PetscSectionReset(section);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(section, 0, *numVertices);CHKERRQ(ierr);
    for (p = 0; p < *numVertices; p++) {
      PetscInt start = vOffsets[p], end = vOffsets[p+1];
      PetscInt numEdges = end-start, *PETSC_RESTRICT edges;
      /* Sort-and-unique each row, then re-stage it in the (now empty) segbuffer */
      ierr = PetscSortRemoveDupsInt(&numEdges, &graph[start]);CHKERRQ(ierr);
      ierr = PetscSectionSetDof(section, p, numEdges);CHKERRQ(ierr);
      ierr = PetscSegBufferGetInts(adjBuffer, numEdges, &edges);CHKERRQ(ierr);
      ierr = PetscArraycpy(edges, &graph[start], numEdges);CHKERRQ(ierr);
    }
    ierr = PetscFree(vOffsets);CHKERRQ(ierr);
    ierr = PetscFree(graph);CHKERRQ(ierr);
    /* Derive CSR graph from section/segbuffer */
    ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
    ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
    ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
    for (idx = 0, p = 0; p < *numVertices; p++) {
      ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
    }
    vOffsets[*numVertices] = size;
    ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
  } else {
    /* Sort adjacencies (not strictly necessary) */
    for (p = 0; p < *numVertices; p++) {
      PetscInt start = vOffsets[p], end = vOffsets[p+1];
      ierr = PetscSortInt(end-start, &graph[start]);CHKERRQ(ierr);
    }
  }

  if (offsets) *offsets = vOffsets;
  if (adjacency) *adjacency = graph;

  /* Cleanup */
  ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
  ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
249 
/* DMPlexCreatePartitionerGraph_ViaMat - Build the cell-cell adjacency graph in CSR form
   by assembling a face-to-cell connectivity matrix F and computing F^T * F: two cells are
   adjacent iff they share a face.  Output arrays (offsets, adjacency) are malloc'd for
   the caller; the optional globalNumbering IS must be destroyed by the caller. */
static PetscErrorCode DMPlexCreatePartitionerGraph_ViaMat(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
{
  Mat            conn, CSR;
  IS             fis, cis, cis_own;
  PetscSF        sfPoint;
  const PetscInt *rows, *cols, *ii, *jj;
  PetscInt       *idxs,*idxs2;
  PetscInt       dim, depth, floc, cloc, i, M, N, c, m, cStart, cEnd, fStart, fEnd;
  PetscMPIInt    rank;
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  if (dim != depth) {
    /* We do not handle the uninterpolated case here */
    ierr = DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);CHKERRQ(ierr);
    /* DMPlexCreateNeighborCSR does not make a numbering */
    if (globalNumbering) {ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);CHKERRQ(ierr);}
    /* Different behavior for empty graphs */
    if (!*numVertices) {
      ierr = PetscMalloc1(1, offsets);CHKERRQ(ierr);
      (*offsets)[0] = 0;
    }
    /* Broken in parallel */
    if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported");
    PetscFunctionReturn(0);
  }
  /* Interpolated and parallel case */

  /* numbering */
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &cStart, &cEnd);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height+1, &fStart, &fEnd);CHKERRQ(ierr);
  ierr = DMPlexCreateNumbering_Plex(dm, cStart, cEnd, 0, &N, sfPoint, &cis);CHKERRQ(ierr);
  ierr = DMPlexCreateNumbering_Plex(dm, fStart, fEnd, 0, &M, sfPoint, &fis);CHKERRQ(ierr);
  if (globalNumbering) {
    ierr = ISDuplicate(cis, globalNumbering);CHKERRQ(ierr);
  }

  /* get positive global ids and local sizes for facets and cells */
  /* The numbering encodes non-owned points as -(id+1); decode and count owned ones */
  ierr = ISGetLocalSize(fis, &m);CHKERRQ(ierr);
  ierr = ISGetIndices(fis, &rows);CHKERRQ(ierr);
  ierr = PetscMalloc1(m, &idxs);CHKERRQ(ierr);
  for (i = 0, floc = 0; i < m; i++) {
    const PetscInt p = rows[i];

    if (p < 0) {
      idxs[i] = -(p+1);
    } else {
      idxs[i] = p;
      floc   += 1;
    }
  }
  ierr = ISRestoreIndices(fis, &rows);CHKERRQ(ierr);
  ierr = ISDestroy(&fis);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF, m, idxs, PETSC_OWN_POINTER, &fis);CHKERRQ(ierr);

  ierr = ISGetLocalSize(cis, &m);CHKERRQ(ierr);
  ierr = ISGetIndices(cis, &cols);CHKERRQ(ierr);
  ierr = PetscMalloc1(m, &idxs);CHKERRQ(ierr);
  ierr = PetscMalloc1(m, &idxs2);CHKERRQ(ierr);
  /* idxs holds all decoded cell ids; idxs2 only the locally-owned ones */
  for (i = 0, cloc = 0; i < m; i++) {
    const PetscInt p = cols[i];

    if (p < 0) {
      idxs[i] = -(p+1);
    } else {
      idxs[i]       = p;
      idxs2[cloc++] = p;
    }
  }
  ierr = ISRestoreIndices(cis, &cols);CHKERRQ(ierr);
  ierr = ISDestroy(&cis);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), m, idxs, PETSC_OWN_POINTER, &cis);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), cloc, idxs2, PETSC_OWN_POINTER, &cis_own);CHKERRQ(ierr);

  /* Create matrix to hold F-C connectivity (MatMatTranspose Mult not supported for MPIAIJ) */
  ierr = MatCreate(PetscObjectComm((PetscObject)dm), &conn);CHKERRQ(ierr);
  ierr = MatSetSizes(conn, floc, cloc, M, N);CHKERRQ(ierr);
  ierr = MatSetType(conn, MATMPIAIJ);CHKERRQ(ierr);
  ierr = DMPlexGetMaxSizes(dm, NULL, &m);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(conn, m, NULL, m, NULL);CHKERRQ(ierr);

  /* Assemble matrix */
  /* Entry (f, c) = 1 iff face f is in the cone of cell c (children of f included) */
  ierr = ISGetIndices(fis, &rows);CHKERRQ(ierr);
  ierr = ISGetIndices(cis, &cols);CHKERRQ(ierr);
  for (c = cStart; c < cEnd; c++) {
    const PetscInt *cone;
    PetscInt        coneSize, row, col, f;

    col  = cols[c-cStart];
    ierr = DMPlexGetCone(dm, c, &cone);CHKERRQ(ierr);
    ierr = DMPlexGetConeSize(dm, c, &coneSize);CHKERRQ(ierr);
    for (f = 0; f < coneSize; f++) {
      const PetscScalar v = 1.0;
      const PetscInt *children;
      PetscInt        numChildren, ch;

      row  = rows[cone[f]-fStart];
      ierr = MatSetValues(conn, 1, &row, 1, &col, &v, INSERT_VALUES);CHKERRQ(ierr);

      /* non-conforming meshes */
      ierr = DMPlexGetTreeChildren(dm, cone[f], &numChildren, &children);CHKERRQ(ierr);
      for (ch = 0; ch < numChildren; ch++) {
        const PetscInt child = children[ch];

        if (child < fStart || child >= fEnd) continue;
        row  = rows[child-fStart];
        ierr = MatSetValues(conn, 1, &row, 1, &col, &v, INSERT_VALUES);CHKERRQ(ierr);
      }
    }
  }
  ierr = ISRestoreIndices(fis, &rows);CHKERRQ(ierr);
  ierr = ISRestoreIndices(cis, &cols);CHKERRQ(ierr);
  ierr = ISDestroy(&fis);CHKERRQ(ierr);
  ierr = ISDestroy(&cis);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(conn, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(conn, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  /* Get parallel CSR by doing conn^T * conn */
  ierr = MatTransposeMatMult(conn, conn, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &CSR);CHKERRQ(ierr);
  ierr = MatDestroy(&conn);CHKERRQ(ierr);

  /* extract local part of the CSR */
  ierr = MatMPIAIJGetLocalMat(CSR, MAT_INITIAL_MATRIX, &conn);CHKERRQ(ierr);
  ierr = MatDestroy(&CSR);CHKERRQ(ierr);
  ierr = MatGetRowIJ(conn, 0, PETSC_FALSE, PETSC_FALSE, &m, &ii, &jj, &flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "No IJ format");

  /* get back requested output */
  if (numVertices) *numVertices = m;
  if (offsets) {
    ierr = PetscCalloc1(m+1, &idxs);CHKERRQ(ierr);
    for (i = 1; i < m+1; i++) idxs[i] = ii[i] - i; /* ParMetis does not like self-connectivity */
    *offsets = idxs;
  }
  if (adjacency) {
    /* One diagonal (self) entry per row is dropped, hence ii[m] - m entries remain */
    ierr = PetscMalloc1(ii[m] - m, &idxs);CHKERRQ(ierr);
    ierr = ISGetIndices(cis_own, &rows);CHKERRQ(ierr);
    for (i = 0, c = 0; i < m; i++) {
      PetscInt j, g = rows[i];

      for (j = ii[i]; j < ii[i+1]; j++) {
        if (jj[j] == g) continue; /* again, self-connectivity */
        idxs[c++] = jj[j];
      }
    }
    if (c != ii[m] - m) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected %D != %D",c,ii[m]-m);
    ierr = ISRestoreIndices(cis_own, &rows);CHKERRQ(ierr);
    *adjacency = idxs;
  }

  /* cleanup */
  ierr = ISDestroy(&cis_own);CHKERRQ(ierr);
  ierr = MatRestoreRowIJ(conn, 0, PETSC_FALSE, PETSC_FALSE, &m, &ii, &jj, &flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "No IJ format");
  ierr = MatDestroy(&conn);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
412 
413 /*@C
414   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
415 
416   Input Parameters:
+ dm      - The mesh DM
418 - height  - Height of the strata from which to construct the graph
419 
420   Output Parameter:
421 + numVertices     - Number of vertices in the graph
422 . offsets         - Point offsets in the graph
423 . adjacency       - Point connectivity in the graph
424 - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.
425 
426   The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
427   representation on the mesh. If requested, globalNumbering needs to be destroyed by the caller; offsets and adjacency need to be freed with PetscFree().
428 
429   Level: developer
430 
431 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMSetAdjacency()
432 @*/
433 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
434 {
435   PetscErrorCode ierr;
436   PetscBool      usemat = PETSC_FALSE;
437 
438   PetscFunctionBegin;
439   ierr = PetscOptionsGetBool(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-dm_plex_csr_via_mat", &usemat, NULL);CHKERRQ(ierr);
440   if (usemat) {
441     ierr = DMPlexCreatePartitionerGraph_ViaMat(dm, height, numVertices, offsets, adjacency, globalNumbering);CHKERRQ(ierr);
442   } else {
443     ierr = DMPlexCreatePartitionerGraph_Native(dm, height, numVertices, offsets, adjacency, globalNumbering);CHKERRQ(ierr);
444   }
445   PetscFunctionReturn(0);
446 }
447 
448 /*@C
449   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
450 
451   Collective on DM
452 
453   Input Arguments:
454 + dm - The DMPlex
455 - cellHeight - The height of mesh points to treat as cells (default should be 0)
456 
457   Output Arguments:
458 + numVertices - The number of local vertices in the graph, or cells in the mesh.
459 . offsets     - The offset to the adjacency list for each cell
460 - adjacency   - The adjacency list for all cells
461 
462   Note: This is suitable for input to a mesh partitioner like ParMetis.
463 
464   Level: advanced
465 
466 .seealso: DMPlexCreate()
467 @*/
468 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
469 {
470   const PetscInt maxFaceCases = 30;
471   PetscInt       numFaceCases = 0;
472   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
473   PetscInt      *off, *adj;
474   PetscInt      *neighborCells = NULL;
475   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
476   PetscErrorCode ierr;
477 
478   PetscFunctionBegin;
479   /* For parallel partitioning, I think you have to communicate supports */
480   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
481   cellDim = dim - cellHeight;
482   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
483   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
484   if (cEnd - cStart == 0) {
485     if (numVertices) *numVertices = 0;
486     if (offsets)   *offsets   = NULL;
487     if (adjacency) *adjacency = NULL;
488     PetscFunctionReturn(0);
489   }
490   numCells  = cEnd - cStart;
491   faceDepth = depth - cellHeight;
492   if (dim == depth) {
493     PetscInt f, fStart, fEnd;
494 
495     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
496     /* Count neighboring cells */
497     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
498     for (f = fStart; f < fEnd; ++f) {
499       const PetscInt *support;
500       PetscInt        supportSize;
501       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
502       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
503       if (supportSize == 2) {
504         PetscInt numChildren;
505 
506         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
507         if (!numChildren) {
508           ++off[support[0]-cStart+1];
509           ++off[support[1]-cStart+1];
510         }
511       }
512     }
513     /* Prefix sum */
514     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
515     if (adjacency) {
516       PetscInt *tmp;
517 
518       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
519       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
520       ierr = PetscArraycpy(tmp, off, numCells+1);CHKERRQ(ierr);
521       /* Get neighboring cells */
522       for (f = fStart; f < fEnd; ++f) {
523         const PetscInt *support;
524         PetscInt        supportSize;
525         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
526         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
527         if (supportSize == 2) {
528           PetscInt numChildren;
529 
530           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
531           if (!numChildren) {
532             adj[tmp[support[0]-cStart]++] = support[1];
533             adj[tmp[support[1]-cStart]++] = support[0];
534           }
535         }
536       }
537 #if defined(PETSC_USE_DEBUG)
538       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
539 #endif
540       ierr = PetscFree(tmp);CHKERRQ(ierr);
541     }
542     if (numVertices) *numVertices = numCells;
543     if (offsets)   *offsets   = off;
544     if (adjacency) *adjacency = adj;
545     PetscFunctionReturn(0);
546   }
547   /* Setup face recognition */
548   if (faceDepth == 1) {
549     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
550 
551     for (c = cStart; c < cEnd; ++c) {
552       PetscInt corners;
553 
554       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
555       if (!cornersSeen[corners]) {
556         PetscInt nFV;
557 
558         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
559         cornersSeen[corners] = 1;
560 
561         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
562 
563         numFaceVertices[numFaceCases++] = nFV;
564       }
565     }
566   }
567   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
568   /* Count neighboring cells */
569   for (cell = cStart; cell < cEnd; ++cell) {
570     PetscInt numNeighbors = PETSC_DETERMINE, n;
571 
572     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
573     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
574     for (n = 0; n < numNeighbors; ++n) {
575       PetscInt        cellPair[2];
576       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
577       PetscInt        meetSize = 0;
578       const PetscInt *meet    = NULL;
579 
580       cellPair[0] = cell; cellPair[1] = neighborCells[n];
581       if (cellPair[0] == cellPair[1]) continue;
582       if (!found) {
583         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
584         if (meetSize) {
585           PetscInt f;
586 
587           for (f = 0; f < numFaceCases; ++f) {
588             if (numFaceVertices[f] == meetSize) {
589               found = PETSC_TRUE;
590               break;
591             }
592           }
593         }
594         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
595       }
596       if (found) ++off[cell-cStart+1];
597     }
598   }
599   /* Prefix sum */
600   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
601 
602   if (adjacency) {
603     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
604     /* Get neighboring cells */
605     for (cell = cStart; cell < cEnd; ++cell) {
606       PetscInt numNeighbors = PETSC_DETERMINE, n;
607       PetscInt cellOffset   = 0;
608 
609       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
610       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
611       for (n = 0; n < numNeighbors; ++n) {
612         PetscInt        cellPair[2];
613         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
614         PetscInt        meetSize = 0;
615         const PetscInt *meet    = NULL;
616 
617         cellPair[0] = cell; cellPair[1] = neighborCells[n];
618         if (cellPair[0] == cellPair[1]) continue;
619         if (!found) {
620           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
621           if (meetSize) {
622             PetscInt f;
623 
624             for (f = 0; f < numFaceCases; ++f) {
625               if (numFaceVertices[f] == meetSize) {
626                 found = PETSC_TRUE;
627                 break;
628               }
629             }
630           }
631           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
632         }
633         if (found) {
634           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
635           ++cellOffset;
636         }
637       }
638     }
639   }
640   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
641   if (numVertices) *numVertices = numCells;
642   if (offsets)   *offsets   = off;
643   if (adjacency) *adjacency = adj;
644   PetscFunctionReturn(0);
645 }
646 
647 /*@C
648   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
649 
650   Not Collective
651 
652   Input Parameters:
653 + name        - The name of a new user-defined creation routine
654 - create_func - The creation routine itself
655 
656   Notes:
657   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
658 
659   Sample usage:
660 .vb
661     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
662 .ve
663 
664   Then, your PetscPartitioner type can be chosen with the procedural interface via
665 .vb
666     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
667     PetscPartitionerSetType(PetscPartitioner, "my_part");
668 .ve
669    or at runtime via the option
670 .vb
671     -petscpartitioner_type my_part
672 .ve
673 
674   Level: advanced
675 
676 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
677 
678 @*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Thin wrapper: the implementation registry is the module-level PetscPartitionerList */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
687 
688 /*@C
689   PetscPartitionerSetType - Builds a particular PetscPartitioner
690 
691   Collective on PetscPartitioner
692 
693   Input Parameters:
694 + part - The PetscPartitioner object
695 - name - The kind of partitioner
696 
697   Options Database Key:
698 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
699 
700   Level: intermediate
701 
702 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
703 @*/
704 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
705 {
706   PetscErrorCode (*r)(PetscPartitioner);
707   PetscBool      match;
708   PetscErrorCode ierr;
709 
710   PetscFunctionBegin;
711   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
712   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
713   if (match) PetscFunctionReturn(0);
714 
715   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
716   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
717 
718   if (part->ops->destroy) {
719     ierr = (*part->ops->destroy)(part);CHKERRQ(ierr);
720   }
721   part->noGraph = PETSC_FALSE;
722   ierr = PetscMemzero(part->ops, sizeof(*part->ops));CHKERRQ(ierr);
723   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
724   ierr = (*r)(part);CHKERRQ(ierr);
725   PetscFunctionReturn(0);
726 }
727 
/*@C
  PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.

  Not Collective

  Input Parameter:
. part - The PetscPartitioner

  Output Parameter:
. name - The PetscPartitioner type name

  Level: intermediate

.seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  /* The type name is stored on the base PetscObject header; no copy is made, so the caller must not free it */
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
751 
752 /*@C
753    PetscPartitionerViewFromOptions - View from Options
754 
755    Collective on PetscPartitioner
756 
757    Input Parameters:
758 +  A - the PetscPartitioner object
759 .  obj - Optional object
760 -  name - command line option
761 
762    Level: intermediate
763 .seealso:  PetscPartitionerView(), PetscObjectViewFromOptions()
764 @*/
765 PetscErrorCode PetscPartitionerViewFromOptions(PetscPartitioner A,PetscObject obj,const char name[])
766 {
767   PetscErrorCode ierr;
768 
769   PetscFunctionBegin;
770   PetscValidHeaderSpecific(A,PETSCPARTITIONER_CLASSID,1);
771   ierr = PetscObjectViewFromOptions((PetscObject)A,obj,name);CHKERRQ(ierr);
772   PetscFunctionReturn(0);
773 }
774 
775 /*@C
776   PetscPartitionerView - Views a PetscPartitioner
777 
778   Collective on PetscPartitioner
779 
780   Input Parameter:
781 + part - the PetscPartitioner object to view
782 - v    - the viewer
783 
784   Level: developer
785 
786 .seealso: PetscPartitionerDestroy()
787 @*/
788 PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
789 {
790   PetscMPIInt    size;
791   PetscBool      isascii;
792   PetscErrorCode ierr;
793 
794   PetscFunctionBegin;
795   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
796   if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
797   ierr = PetscObjectTypeCompare((PetscObject) v, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
798   if (isascii) {
799     ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
800     ierr = PetscViewerASCIIPrintf(v, "Graph Partitioner: %d MPI Process%s\n", size, size > 1 ? "es" : "");CHKERRQ(ierr);
801     ierr = PetscViewerASCIIPrintf(v, "  type: %s\n", part->hdr.type_name);CHKERRQ(ierr);
802     ierr = PetscViewerASCIIPrintf(v, "  edge cut: %D\n", part->edgeCut);CHKERRQ(ierr);
803     ierr = PetscViewerASCIIPrintf(v, "  balance: %.2g\n", part->balance);CHKERRQ(ierr);
804     ierr = PetscViewerASCIIPrintf(v, "  use vertex weights: %d\n", part->usevwgt);CHKERRQ(ierr);
805   }
806   if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
807   PetscFunctionReturn(0);
808 }
809 
810 static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType)
811 {
812   PetscFunctionBegin;
813   if (!currentType) {
814 #if defined(PETSC_HAVE_PARMETIS)
815     *defaultType = PETSCPARTITIONERPARMETIS;
816 #elif defined(PETSC_HAVE_PTSCOTCH)
817     *defaultType = PETSCPARTITIONERPTSCOTCH;
818 #elif defined(PETSC_HAVE_CHACO)
819     *defaultType = PETSCPARTITIONERCHACO;
820 #else
821     *defaultType = PETSCPARTITIONERSIMPLE;
822 #endif
823   } else {
824     *defaultType = currentType;
825   }
826   PetscFunctionReturn(0);
827 }
828 
/*@
  PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to set options for

  Options Database Keys:
+  -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
.  -petscpartitioner_use_vertex_weights - Uses weights associated with the graph vertices
-  -petscpartitioner_view_graph - View the graph each time PetscPartitionerPartition is called. Viewer can be customized, see PetscOptionsGetViewer()

  Level: developer

.seealso: PetscPartitionerView(), PetscPartitionerSetType(), PetscPartitionerPartition()
@*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  const char    *defaultType;
  char           name[256];
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr = PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);CHKERRQ(ierr);
  /* PetscObjectOptionsBegin declares the PetscOptionsObject used by the calls below */
  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, sizeof(name), &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
  } else if (!((PetscObject) part)->type_name) {
    /* No type on the command line and none set yet: fall back to the default */
    ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
  }
  ierr = PetscOptionsBool("-petscpartitioner_use_vertex_weights","Use vertex weights","",part->usevwgt,&part->usevwgt,NULL);CHKERRQ(ierr);
  /* Give the implementation a chance to process its own options */
  if (part->ops->setfromoptions) {
    ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);
  }
  /* Destroy viewers from a previous call before (possibly) recreating them */
  ierr = PetscViewerDestroy(&part->viewer);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&part->viewerGraph);CHKERRQ(ierr);
  ierr = PetscOptionsGetViewer(((PetscObject) part)->comm, ((PetscObject) part)->options, ((PetscObject) part)->prefix, "-petscpartitioner_view", &part->viewer, NULL, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsGetViewer(((PetscObject) part)->comm, ((PetscObject) part)->options, ((PetscObject) part)->prefix, "-petscpartitioner_view_graph", &part->viewerGraph, NULL, &part->viewGraph);CHKERRQ(ierr);
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
876 
877 /*@C
878   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
879 
880   Collective on PetscPartitioner
881 
882   Input Parameter:
883 . part - the PetscPartitioner object to setup
884 
885   Level: developer
886 
887 .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
888 @*/
889 PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
890 {
891   PetscErrorCode ierr;
892 
893   PetscFunctionBegin;
894   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
895   if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
896   PetscFunctionReturn(0);
897 }
898 
899 /*@
900   PetscPartitionerDestroy - Destroys a PetscPartitioner object
901 
902   Collective on PetscPartitioner
903 
904   Input Parameter:
905 . part - the PetscPartitioner object to destroy
906 
907   Level: developer
908 
909 .seealso: PetscPartitionerView()
910 @*/
911 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
912 {
913   PetscErrorCode ierr;
914 
915   PetscFunctionBegin;
916   if (!*part) PetscFunctionReturn(0);
917   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
918 
919   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
920   ((PetscObject) (*part))->refct = 0;
921 
922   ierr = PetscViewerDestroy(&(*part)->viewer);CHKERRQ(ierr);
923   ierr = PetscViewerDestroy(&(*part)->viewerGraph);CHKERRQ(ierr);
924   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
925   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
926   PetscFunctionReturn(0);
927 }
928 
/*@
  PetscPartitionerPartition - Partition a graph

  Collective on PetscPartitioner

  Input Parameters:
+ part    - The PetscPartitioner
. nparts  - Number of partitions
. numVertices - Number of vertices in the local part of the graph
. start - row pointers for the local part of the graph (CSR style)
. adjacency - adjacency list (CSR style)
. vertexSection - PetscSection describing the absolute weight of each local vertex (can be NULL)
- targetSection - PetscSection describing the absolute weight of each partition (can be NULL)

  Output Parameters:
+ partSection     - The PetscSection giving the division of points by partition
- partition       - The list of points by partition

  Options Database:
+ -petscpartitioner_view - View the partitioner information
- -petscpartitioner_view_graph - View the graph we are partitioning

  Notes:
    The chart of the vertexSection (if present) must contain [0,numVertices), with the number of dofs in the section specifying the absolute weight for each vertex.
    The chart of the targetSection (if present) must contain [0,nparts), with the number of dofs in the section specifying the absolute weight for each partition. This information must be the same across processes, PETSc does not check it.

  Level: developer

.seealso: PetscPartitionerCreate(), PetscSectionCreate(), PetscSectionSetChart(), PetscSectionSetDof()
@*/
PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertexSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidLogicalCollectiveInt(part, nparts, 2);
  if (nparts <= 0) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Number of parts must be positive");
  if (numVertices < 0) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of vertices must be non-negative");
  /* The CSR arrays are only required when the implementation consumes the graph */
  if (numVertices && !part->noGraph) {
    PetscValidIntPointer(start, 4);
    PetscValidIntPointer(adjacency, 5);
  }
  if (vertexSection) {
    PetscInt s,e;

    PetscValidHeaderSpecific(vertexSection, PETSC_SECTION_CLASSID, 6);
    ierr = PetscSectionGetChart(vertexSection, &s, &e);CHKERRQ(ierr);
    if (s > 0 || e < numVertices) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid vertexSection chart [%D,%D)",s,e);
  }
  if (targetSection) {
    PetscInt s,e;

    PetscValidHeaderSpecific(targetSection, PETSC_SECTION_CLASSID, 7);
    ierr = PetscSectionGetChart(targetSection, &s, &e);CHKERRQ(ierr);
    if (s > 0 || e < nparts) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid targetSection chart [%D,%D)",s,e);
  }
  PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 8);
  PetscValidPointer(partition, 9);

  ierr = PetscSectionReset(partSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  if (nparts == 1) { /* quick */
    ierr = PetscSectionSetDof(partSection, 0, numVertices);CHKERRQ(ierr);
    ierr = ISCreateStride(PetscObjectComm((PetscObject)part),numVertices,0,1,partition);CHKERRQ(ierr);
  } else {
    if (!part->ops->partition) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "PetscPartitioner %s has no partitioning method", ((PetscObject)part)->type_name);
    ierr = (*part->ops->partition)(part, nparts, numVertices, start, adjacency, vertexSection, targetSection, partSection, partition);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  if (part->viewerGraph) {
    /* -petscpartitioner_view_graph: dump the local CSR graph, one synchronized line per vertex */
    PetscViewer viewer = part->viewerGraph;
    PetscBool   isascii;
    PetscInt    v, i;
    PetscMPIInt rank;

    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) viewer), &rank);CHKERRQ(ierr);
    ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
    if (isascii) {
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]Nv: %D\n", rank, numVertices);CHKERRQ(ierr);
      for (v = 0; v < numVertices; ++v) {
        const PetscInt s = start[v];
        const PetscInt e = start[v+1];

        ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]  ", rank);CHKERRQ(ierr);
        for (i = s; i < e; ++i) {ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%D ", adjacency[i]);CHKERRQ(ierr);}
        ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%D-%D)\n", s, e);CHKERRQ(ierr);
      }
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    }
  }
  if (part->viewer) {
    ierr = PetscPartitionerView(part,part->viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1027 
1028 /*@
1029   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
1030 
1031   Collective
1032 
1033   Input Parameter:
1034 . comm - The communicator for the PetscPartitioner object
1035 
1036   Output Parameter:
1037 . part - The PetscPartitioner object
1038 
1039   Level: beginner
1040 
1041 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
1042 @*/
1043 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
1044 {
1045   PetscPartitioner p;
1046   const char       *partitionerType = NULL;
1047   PetscErrorCode   ierr;
1048 
1049   PetscFunctionBegin;
1050   PetscValidPointer(part, 2);
1051   *part = NULL;
1052   ierr = DMInitializePackage();CHKERRQ(ierr);
1053 
1054   ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
1055   ierr = PetscPartitionerGetDefaultType(NULL,&partitionerType);CHKERRQ(ierr);
1056   ierr = PetscPartitionerSetType(p,partitionerType);CHKERRQ(ierr);
1057 
1058   p->edgeCut = 0;
1059   p->balance = 0.0;
1060   p->usevwgt = PETSC_TRUE;
1061 
1062   *part = p;
1063   PetscFunctionReturn(0);
1064 }
1065 
/*@
  PetscPartitionerDMPlexPartition - Create a non-overlapping partition of the cells in the mesh

  Collective on PetscPartitioner

  Input Parameters:
+ part    - The PetscPartitioner
. dm      - The mesh DM
- targetSection - The PetscSection describing the absolute weight of each partition (can be NULL)

  Output Parameters:
+ partSection     - The PetscSection giving the division of points by partition
- partition       - The list of points by partition

  Notes:
    If the DM has a local section associated, each point to be partitioned will be weighted by the total number of dofs identified
    by the section in the transitive closure of the point.

  Level: developer

.seealso: DMPlexDistribute(), PetscPartitionerCreate(), PetscSectionCreate(), PetscSectionSetChart(), PetscPartitionerPartition()
@*/
PetscErrorCode PetscPartitionerDMPlexPartition(PetscPartitioner part, DM dm, PetscSection targetSection, PetscSection partSection, IS *partition)
{
  PetscMPIInt    size;
  PetscBool      isplex;
  PetscErrorCode ierr;
  PetscSection   vertSection = NULL;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
  if (targetSection) PetscValidHeaderSpecific(targetSection, PETSC_SECTION_CLASSID, 3);
  PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
  PetscValidPointer(partition, 5);
  ierr = PetscObjectTypeCompare((PetscObject)dm,DMPLEX,&isplex);CHKERRQ(ierr);
  if (!isplex) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"Not for type %s",((PetscObject)dm)->type_name);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial shortcut: everything goes into partition 0 */
    PetscInt *points;
    PetscInt  cStart, cEnd, c;

    ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
    ierr = PetscSectionReset(partSection);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
    ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
    for (c = cStart; c < cEnd; ++c) points[c] = c;
    ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  if (part->height == 0) {
    PetscInt numVertices = 0;
    PetscInt *start     = NULL;
    PetscInt *adjacency = NULL;
    IS       globalNumbering;

    if (!part->noGraph || part->viewGraph) {
      ierr = DMPlexCreatePartitionerGraph(dm, part->height, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
    } else { /* only compute the number of owned local vertices */
      const PetscInt *idxs;
      PetscInt       p, pStart, pEnd;

      ierr = DMPlexGetHeightStratum(dm, part->height, &pStart, &pEnd);CHKERRQ(ierr);
      ierr = DMPlexCreateNumbering_Plex(dm, pStart, pEnd, 0, NULL, dm->sf, &globalNumbering);CHKERRQ(ierr);
      ierr = ISGetIndices(globalNumbering, &idxs);CHKERRQ(ierr);
      /* negative global indices mark points owned by another process */
      for (p = 0; p < pEnd - pStart; p++) numVertices += idxs[p] < 0 ? 0 : 1;
      ierr = ISRestoreIndices(globalNumbering, &idxs);CHKERRQ(ierr);
    }
    if (part->usevwgt) {
      PetscSection   section = dm->localSection, clSection = NULL;
      IS             clPoints = NULL;
      const PetscInt *gid,*clIdx;
      PetscInt       v, p, pStart, pEnd;

      /* dm->localSection encodes degrees of freedom per point, not per cell. We need to get the closure index to properly specify cell weights (aka dofs) */
      /* We do this only if the local section has been set */
      if (section) {
        ierr = PetscSectionGetClosureIndex(section, (PetscObject)dm, &clSection, NULL);CHKERRQ(ierr);
        if (!clSection) {
          ierr = DMPlexCreateClosureIndex(dm,NULL);CHKERRQ(ierr);
        }
        ierr = PetscSectionGetClosureIndex(section, (PetscObject)dm, &clSection, &clPoints);CHKERRQ(ierr);
        ierr = ISGetIndices(clPoints,&clIdx);CHKERRQ(ierr);
      }
      ierr = DMPlexGetHeightStratum(dm, part->height, &pStart, &pEnd);CHKERRQ(ierr);
      ierr = PetscSectionCreate(PETSC_COMM_SELF, &vertSection);CHKERRQ(ierr);
      ierr = PetscSectionSetChart(vertSection, 0, numVertices);CHKERRQ(ierr);
      if (globalNumbering) {
        ierr = ISGetIndices(globalNumbering,&gid);CHKERRQ(ierr);
      } else gid = NULL;
      for (p = pStart, v = 0; p < pEnd; ++p) {
        PetscInt dof = 1;

        /* skip cells in the overlap */
        if (gid && gid[p-pStart] < 0) continue;

        if (section) {
          PetscInt cl, clSize, clOff;

          dof  = 0;
          ierr = PetscSectionGetDof(clSection, p, &clSize);CHKERRQ(ierr);
          ierr = PetscSectionGetOffset(clSection, p, &clOff);CHKERRQ(ierr);
          for (cl = 0; cl < clSize; cl+=2) {
            PetscInt clDof, clPoint = clIdx[clOff + cl]; /* odd indices are reserved for orientations */

            ierr = PetscSectionGetDof(section, clPoint, &clDof);CHKERRQ(ierr);
            dof += clDof;
          }
        }
        if (!dof) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of dofs for point %D in the local section should be positive",p);
        ierr = PetscSectionSetDof(vertSection, v, dof);CHKERRQ(ierr);
        v++;
      }
      if (globalNumbering) {
        ierr = ISRestoreIndices(globalNumbering,&gid);CHKERRQ(ierr);
      }
      if (clPoints) {
        ierr = ISRestoreIndices(clPoints,&clIdx);CHKERRQ(ierr);
      }
      ierr = PetscSectionSetUp(vertSection);CHKERRQ(ierr);
    }
    ierr = PetscPartitionerPartition(part, size, numVertices, start, adjacency, vertSection, targetSection, partSection, partition);CHKERRQ(ierr);
    ierr = PetscFree(start);CHKERRQ(ierr);
    ierr = PetscFree(adjacency);CHKERRQ(ierr);
    if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
      const PetscInt *globalNum;
      const PetscInt *partIdx;
      PetscInt       *map, cStart, cEnd;
      PetscInt       *adjusted, i, localSize, offset;
      IS             newPartition;

      ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
      ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
      ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
      ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
      ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
      ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
      /* map[k] = local point of the k-th locally owned vertex */
      for (i = cStart, offset = 0; i < cEnd; i++) {
        if (globalNum[i - cStart] >= 0) map[offset++] = i;
      }
      for (i = 0; i < localSize; i++) {
        adjusted[i] = map[partIdx[i]];
      }
      ierr = PetscFree(map);CHKERRQ(ierr);
      ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
      ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
      ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
      ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
      ierr = ISDestroy(partition);CHKERRQ(ierr);
      *partition = newPartition;
    }
  } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
  ierr = PetscSectionDestroy(&vertSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1223 
1224 static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
1225 {
1226   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1227   PetscErrorCode          ierr;
1228 
1229   PetscFunctionBegin;
1230   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
1231   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
1232   ierr = PetscFree(p);CHKERRQ(ierr);
1233   PetscFunctionReturn(0);
1234 }
1235 
1236 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
1237 {
1238   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1239   PetscErrorCode          ierr;
1240 
1241   PetscFunctionBegin;
1242   if (p->random) {
1243     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1244     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
1245     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1246   }
1247   PetscFunctionReturn(0);
1248 }
1249 
1250 static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
1251 {
1252   PetscBool      iascii;
1253   PetscErrorCode ierr;
1254 
1255   PetscFunctionBegin;
1256   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1257   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1258   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1259   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
1260   PetscFunctionReturn(0);
1261 }
1262 
1263 static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1264 {
1265   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1266   PetscErrorCode          ierr;
1267 
1268   PetscFunctionBegin;
1269   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner Shell Options");CHKERRQ(ierr);
1270   ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
1271   ierr = PetscOptionsTail();CHKERRQ(ierr);
1272   PetscFunctionReturn(0);
1273 }
1274 
1275 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
1276 {
1277   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1278   PetscInt                np;
1279   PetscErrorCode          ierr;
1280 
1281   PetscFunctionBegin;
1282   if (p->random) {
1283     PetscRandom r;
1284     PetscInt   *sizes, *points, v, p;
1285     PetscMPIInt rank;
1286 
1287     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) part), &rank);CHKERRQ(ierr);
1288     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
1289     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr);
1290     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
1291     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
1292     for (v = 0; v < numVertices; ++v) {points[v] = v;}
1293     for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);}
1294     for (v = numVertices-1; v > 0; --v) {
1295       PetscReal val;
1296       PetscInt  w, tmp;
1297 
1298       ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));CHKERRQ(ierr);
1299       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
1300       w    = PetscFloorReal(val);
1301       tmp       = points[v];
1302       points[v] = points[w];
1303       points[w] = tmp;
1304     }
1305     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
1306     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
1307     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
1308   }
1309   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
1310   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
1311   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
1312   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
1313   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
1314   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
1315   *partition = p->partition;
1316   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
1317   PetscFunctionReturn(0);
1318 }
1319 
1320 static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
1321 {
1322   PetscFunctionBegin;
1323   part->noGraph             = PETSC_TRUE; /* PetscPartitionerShell cannot overload the partition call, so it is safe for now */
1324   part->ops->view           = PetscPartitionerView_Shell;
1325   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
1326   part->ops->destroy        = PetscPartitionerDestroy_Shell;
1327   part->ops->partition      = PetscPartitionerPartition_Shell;
1328   PetscFunctionReturn(0);
1329 }
1330 
1331 /*MC
1332   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
1333 
1334   Level: intermediate
1335 
1336   Options Database Keys:
1337 .  -petscpartitioner_shell_random - Use a random partition
1338 
1339 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1340 M*/
1341 
1342 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
1343 {
1344   PetscPartitioner_Shell *p;
1345   PetscErrorCode          ierr;
1346 
1347   PetscFunctionBegin;
1348   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1349   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1350   part->data = p;
1351 
1352   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
1353   p->random = PETSC_FALSE;
1354   PetscFunctionReturn(0);
1355 }
1356 
/*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh

  Collective on PetscPartitioner

  Input Parameters:
+ part   - The PetscPartitioner
. size   - The number of partitions
. sizes  - array of length size (or NULL) providing the number of points in each partition
- points - array of length sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)

  Level: developer

  Notes:
    It is safe to free the sizes and points arrays after use in this routine.

.seealso: DMPlexDistribute(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscInt                proc, numPoints;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  if (sizes)  {PetscValidPointer(sizes, 3);}
  if (points) {PetscValidPointer(points, 4);}
  /* Replace any previously set partition */
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
  if (sizes) {
    for (proc = 0; proc < size; ++proc) {
      ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
    }
  }
  ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
  /* The IS copies the caller's points array (PETSC_COPY_VALUES), so the caller may free it */
  ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1399 
1400 /*@
1401   PetscPartitionerShellSetRandom - Set the flag to use a random partition
1402 
1403   Collective on PetscPartitioner
1404 
1405   Input Parameters:
1406 + part   - The PetscPartitioner
1407 - random - The flag to use a random partition
1408 
1409   Level: intermediate
1410 
1411 .seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
1412 @*/
1413 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
1414 {
1415   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1416 
1417   PetscFunctionBegin;
1418   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1419   p->random = random;
1420   PetscFunctionReturn(0);
1421 }
1422 
1423 /*@
  PetscPartitionerShellGetRandom - Get the flag to use a random partition
1425 
1426   Collective on PetscPartitioner
1427 
1428   Input Parameter:
1429 . part   - The PetscPartitioner
1430 
  Output Parameter:
1432 . random - The flag to use a random partition
1433 
1434   Level: intermediate
1435 
.seealso: PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
1437 @*/
1438 PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
1439 {
1440   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1441 
1442   PetscFunctionBegin;
1443   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1444   PetscValidPointer(random, 2);
1445   *random = p->random;
1446   PetscFunctionReturn(0);
1447 }
1448 
1449 static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
1450 {
1451   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
1452   PetscErrorCode          ierr;
1453 
1454   PetscFunctionBegin;
1455   ierr = PetscFree(p);CHKERRQ(ierr);
1456   PetscFunctionReturn(0);
1457 }
1458 
static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  /* The simple partitioner has no adjustable state, so there is nothing to print */
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1464 
1465 static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
1466 {
1467   PetscBool      iascii;
1468   PetscErrorCode ierr;
1469 
1470   PetscFunctionBegin;
1471   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1472   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1473   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1474   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
1475   PetscFunctionReturn(0);
1476 }
1477 
static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np, *tpwgts = NULL, sumw = 0, numVerticesGlobal  = 0;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Split the vertices into nparts contiguous chunks of the global numbering,
     ignoring the graph topology entirely.  The local point numbering is kept
     (identity IS); only the chunk sizes stored in partSection depend on nparts
     and on the optional target partition weights (targetSection). */
  if (vertSection) { ierr = PetscInfo(part,"PETSCPARTITIONERSIMPLE ignores vertex weights\n");CHKERRQ(ierr); }
  comm = PetscObjectComm((PetscObject)part);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (targetSection) {
    /* Convert the target weights into integer chunk sizes tpwgts[] summing
       exactly to the global vertex count; any rounding leftover is assigned
       to the chunk with the largest weight */
    ierr = MPIU_Allreduce(&numVertices, &numVerticesGlobal, 1, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
    ierr = PetscCalloc1(nparts,&tpwgts);CHKERRQ(ierr);
    for (np = 0; np < nparts; ++np) {
      ierr = PetscSectionGetDof(targetSection,np,&tpwgts[np]);CHKERRQ(ierr);
      sumw += tpwgts[np];
    }
    if (!sumw) {
      /* All target weights vanish: fall back to the unweighted splitting below */
      ierr = PetscFree(tpwgts);CHKERRQ(ierr);
    } else {
      PetscInt m,mp;
      for (np = 0; np < nparts; ++np) tpwgts[np] = (tpwgts[np]*numVerticesGlobal)/sumw;
      for (np = 0, m = -1, mp = 0, sumw = 0; np < nparts; ++np) {
        if (m < tpwgts[np]) { m = tpwgts[np]; mp = np; }
        sumw += tpwgts[np];
      }
      if (sumw != numVerticesGlobal) tpwgts[mp] += numVerticesGlobal - sumw;
    }
  }

  /* Identity map: local vertex v remains point v; partSection records how many
     points each partition receives from this rank */
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    if (tpwgts) {
      for (np = 0; np < nparts; ++np) {
        ierr = PetscSectionSetDof(partSection, np, tpwgts[np]);CHKERRQ(ierr);
      }
    } else {
      /* Even split; the first (numVertices % nparts) chunks get one extra vertex */
      for (np = 0; np < nparts; ++np) {
        ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);
      }
    }
  } else {
    if (tpwgts) {
      /* Parallel weighted case: build a global vector whose entry j holds the
         partition index owning global vertex j, then count the local entries */
      Vec         v;
      PetscScalar *array;
      PetscInt    st,j;
      PetscMPIInt rank;

      ierr = VecCreate(comm,&v);CHKERRQ(ierr);
      ierr = VecSetSizes(v,numVertices,numVerticesGlobal);CHKERRQ(ierr);
      ierr = VecSetType(v,VECSTANDARD);CHKERRQ(ierr);
      ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
      /* Rank np fills the entries of chunk np; the last rank also covers the
         chunks beyond the communicator size when nparts > size */
      for (np = 0,st = 0; np < nparts; ++np) {
        if (rank == np || (rank == size-1 && size < nparts && np >= size)) {
          for (j = 0; j < tpwgts[np]; j++) {
            ierr = VecSetValue(v,st+j,np,INSERT_VALUES);CHKERRQ(ierr);
          }
        }
        st += tpwgts[np];
      }
      ierr = VecAssemblyBegin(v);CHKERRQ(ierr);
      ierr = VecAssemblyEnd(v);CHKERRQ(ierr);
      ierr = VecGetArray(v,&array);CHKERRQ(ierr);
      for (j = 0; j < numVertices; ++j) {
        ierr = PetscSectionAddDof(partSection,PetscRealPart(array[j]),1);CHKERRQ(ierr);
      }
      ierr = VecRestoreArray(v,&array);CHKERRQ(ierr);
      ierr = VecDestroy(&v);CHKERRQ(ierr);
    } else {
      /* Parallel unweighted case: analytically determine which chunks overlap
         this rank's range [myFirst, myLast] of the global vertex numbering */
      PetscMPIInt rank;
      PetscInt nvGlobal, *offsets, myFirst, myLast;

      ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
      offsets[0] = 0;
      ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
      for (np = 2; np <= size; np++) {
        offsets[np] += offsets[np-1];
      }
      nvGlobal = offsets[size];
      ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
      myFirst = offsets[rank];
      myLast  = offsets[rank + 1] - 1;
      ierr = PetscFree(offsets);CHKERRQ(ierr);
      if (numVertices) {
        PetscInt firstPart = 0, firstLargePart = 0;
        PetscInt lastPart = 0, lastLargePart = 0;
        PetscInt rem = nvGlobal % nparts;
        PetscInt pSmall = nvGlobal/nparts;
        PetscInt pBig = nvGlobal/nparts + 1;

        /* The first rem chunks have pBig vertices, the remaining have pSmall */
        if (rem) {
          firstLargePart = myFirst / pBig;
          lastLargePart  = myLast  / pBig;

          if (firstLargePart < rem) {
            firstPart = firstLargePart;
          } else {
            firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
          }
          if (lastLargePart < rem) {
            lastPart = lastLargePart;
          } else {
            lastPart = rem + (myLast - (rem * pBig)) / pSmall;
          }
        } else {
          firstPart = myFirst / (nvGlobal/nparts);
          lastPart  = myLast  / (nvGlobal/nparts);
        }

        /* Credit each overlapping chunk with the size of its intersection with
           the local vertex range */
        for (np = firstPart; np <= lastPart; np++) {
          PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
          PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

          PartStart = PetscMax(PartStart,myFirst);
          PartEnd   = PetscMin(PartEnd,myLast+1);
          ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
        }
      }
    }
  }
  ierr = PetscFree(tpwgts);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1602 
1603 static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1604 {
1605   PetscFunctionBegin;
1606   part->noGraph        = PETSC_TRUE;
1607   part->ops->view      = PetscPartitionerView_Simple;
1608   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1609   part->ops->partition = PetscPartitionerPartition_Simple;
1610   PetscFunctionReturn(0);
1611 }
1612 
1613 /*MC
1614   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1615 
1616   Level: intermediate
1617 
1618 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1619 M*/
1620 
1621 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1622 {
1623   PetscPartitioner_Simple *p;
1624   PetscErrorCode           ierr;
1625 
1626   PetscFunctionBegin;
1627   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1628   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1629   part->data = p;
1630 
1631   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1632   PetscFunctionReturn(0);
1633 }
1634 
1635 static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1636 {
1637   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1638   PetscErrorCode          ierr;
1639 
1640   PetscFunctionBegin;
1641   ierr = PetscFree(p);CHKERRQ(ierr);
1642   PetscFunctionReturn(0);
1643 }
1644 
static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  /* The gather partitioner has no adjustable state, so there is nothing to print */
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1650 
1651 static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1652 {
1653   PetscBool      iascii;
1654   PetscErrorCode ierr;
1655 
1656   PetscFunctionBegin;
1657   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1658   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1659   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1660   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1661   PetscFunctionReturn(0);
1662 }
1663 
1664 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
1665 {
1666   PetscInt       np;
1667   PetscErrorCode ierr;
1668 
1669   PetscFunctionBegin;
1670   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1671   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1672   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1673   PetscFunctionReturn(0);
1674 }
1675 
1676 static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1677 {
1678   PetscFunctionBegin;
1679   part->noGraph        = PETSC_TRUE;
1680   part->ops->view      = PetscPartitionerView_Gather;
1681   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1682   part->ops->partition = PetscPartitionerPartition_Gather;
1683   PetscFunctionReturn(0);
1684 }
1685 
1686 /*MC
1687   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1688 
1689   Level: intermediate
1690 
1691 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1692 M*/
1693 
1694 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1695 {
1696   PetscPartitioner_Gather *p;
1697   PetscErrorCode           ierr;
1698 
1699   PetscFunctionBegin;
1700   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1701   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1702   part->data = p;
1703 
1704   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1705   PetscFunctionReturn(0);
1706 }
1707 
1708 
1709 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1710 {
1711   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1712   PetscErrorCode          ierr;
1713 
1714   PetscFunctionBegin;
1715   ierr = PetscFree(p);CHKERRQ(ierr);
1716   PetscFunctionReturn(0);
1717 }
1718 
static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  /* The Chaco partitioner exposes no options to report */
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1724 
1725 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1726 {
1727   PetscBool      iascii;
1728   PetscErrorCode ierr;
1729 
1730   PetscFunctionBegin;
1731   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1732   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1733   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1734   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1735   PetscFunctionReturn(0);
1736 }
1737 
1738 #if defined(PETSC_HAVE_CHACO)
1739 #if defined(PETSC_HAVE_UNISTD_H)
1740 #include <unistd.h>
1741 #endif
1742 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1743 #include <chaco.h>
1744 #else
1745 /* Older versions of Chaco do not have an include file */
1746 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1747                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1748                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1749                        int mesh_dims[3], double *goal, int global_method, int local_method,
1750                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1751 #endif
1752 extern int FREE_GRAPH;
1753 #endif
1754 
/* Partition the (serial) graph with the Chaco library.  Chaco cannot handle a
   distributed graph, so at most one rank may hold vertices. */
static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int           *assignment;              /* Output partition */
#else
  short int     *assignment;              /* Output partition */
#endif
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)part,&comm);CHKERRQ(ierr);
#if defined (PETSC_USE_DEBUG)
  /* In debug builds, verify that at most one rank actually holds vertices */
  {
    int ival,isum;
    PetscBool distributed;

    ival = (numVertices > 0);
    ierr = MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);CHKERRQ(ierr);
    distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
    if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
  }
#endif
  if (!numVertices) { /* distributed case, return if not holding the graph */
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Shift adjacency in place to the 1-based numbering Chaco expects; undone below */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  if (part->usevwgt) { ierr = PetscInfo(part,"PETSCPARTITIONERCHACO ignores vertex weights\n");CHKERRQ(ierr); }
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  /* Restore stdout and capture whatever Chaco printed, for use in error reports */
  {
    char msgLog[10000];
    int  count;

    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#else
  if (ierr) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
#endif
  /* Convert to PetscSection+IS */
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  /* Group the vertex numbers by their assigned partition */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Restore the caller's 0-based adjacency numbering */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1872 
1873 static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1874 {
1875   PetscFunctionBegin;
1876   part->noGraph        = PETSC_FALSE;
1877   part->ops->view      = PetscPartitionerView_Chaco;
1878   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1879   part->ops->partition = PetscPartitionerPartition_Chaco;
1880   PetscFunctionReturn(0);
1881 }
1882 
1883 /*MC
1884   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1885 
1886   Level: intermediate
1887 
1888 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1889 M*/
1890 
1891 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1892 {
1893   PetscPartitioner_Chaco *p;
1894   PetscErrorCode          ierr;
1895 
1896   PetscFunctionBegin;
1897   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1898   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1899   part->data = p;
1900 
1901   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1902   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1903   PetscFunctionReturn(0);
1904 }
1905 
1906 static const char *ptypes[] = {"kway", "rb"};
1907 
1908 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1909 {
1910   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1911   PetscErrorCode             ierr;
1912 
1913   PetscFunctionBegin;
1914   ierr = MPI_Comm_free(&p->pcomm);CHKERRQ(ierr);
1915   ierr = PetscFree(p);CHKERRQ(ierr);
1916   PetscFunctionReturn(0);
1917 }
1918 
1919 static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1920 {
1921   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1922   PetscErrorCode             ierr;
1923 
1924   PetscFunctionBegin;
1925   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1926   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis type: %s\n", ptypes[p->ptype]);CHKERRQ(ierr);
1927   ierr = PetscViewerASCIIPrintf(viewer, "load imbalance ratio %g\n", (double) p->imbalanceRatio);CHKERRQ(ierr);
1928   ierr = PetscViewerASCIIPrintf(viewer, "debug flag %D\n", p->debugFlag);CHKERRQ(ierr);
1929   ierr = PetscViewerASCIIPrintf(viewer, "random seed %D\n", p->randomSeed);CHKERRQ(ierr);
1930   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1931   PetscFunctionReturn(0);
1932 }
1933 
1934 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1935 {
1936   PetscBool      iascii;
1937   PetscErrorCode ierr;
1938 
1939   PetscFunctionBegin;
1940   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1941   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1942   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1943   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1944   PetscFunctionReturn(0);
1945 }
1946 
static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
  PetscErrorCode            ierr;

  PetscFunctionBegin;
  /* Register the ParMETIS tuning knobs with the options database; each field
     keeps its current value when the corresponding option is not given */
  ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");CHKERRQ(ierr);
  ierr = PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-petscpartitioner_parmetis_imbalance_ratio", "Load imbalance ratio limit", "", p->imbalanceRatio, &p->imbalanceRatio, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-petscpartitioner_parmetis_debug", "Debugging flag", "", p->debugFlag, &p->debugFlag, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-petscpartitioner_parmetis_seed", "Random seed", "", p->randomSeed, &p->randomSeed, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1961 
1962 #if defined(PETSC_HAVE_PARMETIS)
1963 #include <parmetis.h>
1964 #endif
1965 
1966 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
1967 {
1968 #if defined(PETSC_HAVE_PARMETIS)
1969   PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *) part->data;
1970   MPI_Comm       comm;
1971   PetscInt       nvtxs       = numVertices; /* The number of vertices in full graph */
1972   PetscInt      *vtxdist;                   /* Distribution of vertices across processes */
1973   PetscInt      *xadj        = start;       /* Start of edge list for each vertex */
1974   PetscInt      *adjncy      = adjacency;   /* Edge lists for all vertices */
1975   PetscInt      *vwgt        = NULL;        /* Vertex weights */
1976   PetscInt      *adjwgt      = NULL;        /* Edge weights */
1977   PetscInt       wgtflag     = 0;           /* Indicates which weights are present */
1978   PetscInt       numflag     = 0;           /* Indicates initial offset (0 or 1) */
1979   PetscInt       ncon        = 1;           /* The number of weights per vertex */
1980   PetscInt       metis_ptype = pm->ptype;   /* kway or recursive bisection */
1981   real_t        *tpwgts;                    /* The fraction of vertex weights assigned to each partition */
1982   real_t        *ubvec;                     /* The balance intolerance for vertex weights */
1983   PetscInt       options[64];               /* Options */
1984   PetscInt       v, i, *assignment, *points;
1985   PetscMPIInt    p, size, rank;
1986   PetscBool      hasempty = PETSC_FALSE;
1987   PetscErrorCode ierr;
1988 
1989   PetscFunctionBegin;
1990   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1991   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1992   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1993   /* Calculate vertex distribution */
1994   ierr = PetscMalloc4(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment);CHKERRQ(ierr);
1995   vtxdist[0] = 0;
1996   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1997   for (p = 2; p <= size; ++p) {
1998     hasempty = (PetscBool)(hasempty || !vtxdist[p-1] || !vtxdist[p]);
1999     vtxdist[p] += vtxdist[p-1];
2000   }
2001   /* null graph */
2002   if (vtxdist[size] == 0) {
2003     ierr = PetscFree4(vtxdist,tpwgts,ubvec,assignment);CHKERRQ(ierr);
2004     ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
2005     PetscFunctionReturn(0);
2006   }
2007   /* Calculate partition weights */
2008   if (targetSection) {
2009     PetscInt p;
2010     real_t   sumt = 0.0;
2011 
2012     for (p = 0; p < nparts; ++p) {
2013       PetscInt tpd;
2014 
2015       ierr = PetscSectionGetDof(targetSection,p,&tpd);CHKERRQ(ierr);
2016       sumt += tpd;
2017       tpwgts[p] = tpd;
2018     }
2019     if (sumt) { /* METIS/ParMETIS do not like exactly zero weight */
2020       for (p = 0, sumt = 0.0; p < nparts; ++p) {
2021         tpwgts[p] = PetscMax(tpwgts[p],PETSC_SMALL);
2022         sumt += tpwgts[p];
2023       }
2024       for (p = 0; p < nparts; ++p) tpwgts[p] /= sumt;
2025       for (p = 0, sumt = 0.0; p < nparts-1; ++p) sumt += tpwgts[p];
2026       tpwgts[nparts - 1] = 1. - sumt;
2027     }
2028   } else {
2029     for (p = 0; p < nparts; ++p) tpwgts[p] = 1.0/nparts;
2030   }
2031   ubvec[0] = pm->imbalanceRatio;
2032 
2033   /* Weight cells */
2034   if (vertSection) {
2035     ierr = PetscMalloc1(nvtxs,&vwgt);CHKERRQ(ierr);
2036     for (v = 0; v < nvtxs; ++v) {
2037       ierr = PetscSectionGetDof(vertSection, v, &vwgt[v]);CHKERRQ(ierr);
2038     }
2039     wgtflag |= 2; /* have weights on graph vertices */
2040   }
2041 
2042   for (p = 0; !vtxdist[p+1] && p < size; ++p);
2043   if (vtxdist[p+1] == vtxdist[size]) {
2044     if (rank == p) {
2045       ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
2046       options[METIS_OPTION_DBGLVL] = pm->debugFlag;
2047       options[METIS_OPTION_SEED]   = pm->randomSeed;
2048       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
2049       if (metis_ptype == 1) {
2050         PetscStackPush("METIS_PartGraphRecursive");
2051         ierr = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
2052         PetscStackPop;
2053         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
2054       } else {
2055         /*
2056          It would be nice to activate the two options below, but they would need some actual testing.
2057          - Turning on these options may exercise path of the METIS code that have bugs and may break production runs.
2058          - If CONTIG is set to 1, METIS will exit with error if the graph is disconnected, despite the manual saying the option is ignored in such case.
2059         */
2060         /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
2061         /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
2062         PetscStackPush("METIS_PartGraphKway");
2063         ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
2064         PetscStackPop;
2065         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
2066       }
2067     }
2068   } else {
2069     MPI_Comm pcomm;
2070 
2071     options[0] = 1; /*use options */
2072     options[1] = pm->debugFlag;
2073     options[2] = (pm->randomSeed == -1) ? 15 : pm->randomSeed; /* default is GLOBAL_SEED=15 from `libparmetis/defs.h` */
2074 
2075     if (hasempty) { /* parmetis does not support empty graphs on some of the processes */
2076       PetscInt cnt;
2077 
2078       ierr = MPI_Comm_split(pm->pcomm,!!nvtxs,rank,&pcomm);CHKERRQ(ierr);
2079       for (p=0,cnt=0;p<size;p++) {
2080         if (vtxdist[p+1] != vtxdist[p]) {
2081           vtxdist[cnt+1] = vtxdist[p+1];
2082           cnt++;
2083         }
2084       }
2085     } else pcomm = pm->pcomm;
2086     if (nvtxs) {
2087       PetscStackPush("ParMETIS_V3_PartKway");
2088       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &pcomm);
2089       PetscStackPop;
2090       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
2091     }
2092     if (hasempty) {
2093       ierr = MPI_Comm_free(&pcomm);CHKERRQ(ierr);
2094     }
2095   }
2096 
2097   /* Convert to PetscSection+IS */
2098   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
2099   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
2100   for (p = 0, i = 0; p < nparts; ++p) {
2101     for (v = 0; v < nvtxs; ++v) {
2102       if (assignment[v] == p) points[i++] = v;
2103     }
2104   }
2105   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
2106   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
2107   ierr = PetscFree4(vtxdist,tpwgts,ubvec,assignment);CHKERRQ(ierr);
2108   ierr = PetscFree(vwgt);CHKERRQ(ierr);
2109   PetscFunctionReturn(0);
2110 #else
2111   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
2112 #endif
2113 }
2114 
2115 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
2116 {
2117   PetscFunctionBegin;
2118   part->noGraph             = PETSC_FALSE;
2119   part->ops->view           = PetscPartitionerView_ParMetis;
2120   part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
2121   part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
2122   part->ops->partition      = PetscPartitionerPartition_ParMetis;
2123   PetscFunctionReturn(0);
2124 }
2125 
2126 /*MC
2127   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMETIS library
2128 
2129   Level: intermediate
2130 
2131   Options Database Keys:
2132 +  -petscpartitioner_parmetis_type <string> - ParMETIS partitioning type. Either "kway" or "rb" (recursive bisection)
2133 .  -petscpartitioner_parmetis_imbalance_ratio <value> - Load imbalance ratio limit
2134 .  -petscpartitioner_parmetis_debug <int> - Debugging flag passed to ParMETIS/METIS routines
2135 -  -petscpartitioner_parmetis_seed <int> - Random seed
2136 
2137   Notes: when the graph is on a single process, this partitioner actually calls METIS and not ParMETIS
2138 
2139 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
2140 M*/
2141 
2142 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
2143 {
2144   PetscPartitioner_ParMetis *p;
2145   PetscErrorCode          ierr;
2146 
2147   PetscFunctionBegin;
2148   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
2149   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
2150   part->data = p;
2151 
2152   ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)part),&p->pcomm);CHKERRQ(ierr);
2153   p->ptype          = 0;
2154   p->imbalanceRatio = 1.05;
2155   p->debugFlag      = 0;
2156   p->randomSeed     = -1; /* defaults to GLOBAL_SEED=15 from `libparmetis/defs.h` */
2157 
2158   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
2159   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
2160   PetscFunctionReturn(0);
2161 }
2162 
2163 #if defined(PETSC_HAVE_PTSCOTCH)
2164 
2165 EXTERN_C_BEGIN
2166 #include <ptscotch.h>
2167 EXTERN_C_END
2168 
2169 #define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0)
2170 
2171 static int PTScotch_Strategy(PetscInt strategy)
2172 {
2173   switch (strategy) {
2174   case  0: return SCOTCH_STRATDEFAULT;
2175   case  1: return SCOTCH_STRATQUALITY;
2176   case  2: return SCOTCH_STRATSPEED;
2177   case  3: return SCOTCH_STRATBALANCE;
2178   case  4: return SCOTCH_STRATSAFETY;
2179   case  5: return SCOTCH_STRATSCALABILITY;
2180   case  6: return SCOTCH_STRATRECURSIVE;
2181   case  7: return SCOTCH_STRATREMAP;
2182   default: return SCOTCH_STRATDEFAULT;
2183   }
2184 }
2185 
/* Partition a graph held entirely on one process using the sequential Scotch
   library.  The graph is CSR-style: xadj has n+1 entries and xadj[n] is the
   total edge count; adjncy lists the neighbors.  vtxwgt/adjwgt are optional
   vertex/edge weights, tpart optionally gives target partition weights (one
   per part), and part[] receives the partition assignment of each vertex. */
static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
                                             SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num tpart[], SCOTCH_Num part[])
{
  SCOTCH_Graph   grafdat;
  SCOTCH_Strat   stradat;
  SCOTCH_Num     vertnbr = n;
  SCOTCH_Num     edgenbr = xadj[n];
  SCOTCH_Num*    velotab = vtxwgt;
  SCOTCH_Num*    edlotab = adjwgt;
  SCOTCH_Num     flagval = strategy;
  double         kbalval = imbalance;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  {
    /* Honor the deprecated way of disabling vertex weights.  NOTE: the
       misspelling "petscpartititoner" is the historical option name; do not
       correct it here or the deprecated option would stop matching. */
    PetscBool flg = PETSC_TRUE;
    ierr = PetscOptionsDeprecatedNoObject("-petscpartititoner_ptscotch_vertex_weight",NULL,"3.13","Use -petscpartitioner_use_vertex_weights");CHKERRQ(ierr);
    ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
    if (!flg) velotab = NULL;
  }
  /* Build the Scotch graph (base index 0) and the mapping strategy */
  ierr = SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
  if (tpart) {
    /* Target weights given: map onto a weighted complete architecture */
    SCOTCH_Arch archdat;
    ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
    ierr = SCOTCH_archCmpltw(&archdat, nparts, tpart);CHKERRPTSCOTCH(ierr);
    ierr = SCOTCH_graphMap(&grafdat, &archdat, &stradat, part);CHKERRPTSCOTCH(ierr);
    SCOTCH_archExit(&archdat);
  } else {
    /* No target weights: plain nparts-way partitioning */
    ierr = SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
  }
  SCOTCH_stratExit(&stradat);
  SCOTCH_graphExit(&grafdat);
  PetscFunctionReturn(0);
}
2223 
2224 static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
2225                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num tpart[], SCOTCH_Num part[], MPI_Comm comm)
2226 {
2227   PetscMPIInt     procglbnbr;
2228   PetscMPIInt     proclocnum;
2229   SCOTCH_Arch     archdat;
2230   SCOTCH_Dgraph   grafdat;
2231   SCOTCH_Dmapping mappdat;
2232   SCOTCH_Strat    stradat;
2233   SCOTCH_Num      vertlocnbr;
2234   SCOTCH_Num      edgelocnbr;
2235   SCOTCH_Num*     veloloctab = vtxwgt;
2236   SCOTCH_Num*     edloloctab = adjwgt;
2237   SCOTCH_Num      flagval = strategy;
2238   double          kbalval = imbalance;
2239   PetscErrorCode  ierr;
2240 
2241   PetscFunctionBegin;
2242   {
2243     PetscBool flg = PETSC_TRUE;
2244     ierr = PetscOptionsDeprecatedNoObject("-petscpartititoner_ptscotch_vertex_weight",NULL,"3.13","Use -petscpartitioner_use_vertex_weights");CHKERRQ(ierr);
2245     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
2246     if (!flg) veloloctab = NULL;
2247   }
2248   ierr = MPI_Comm_size(comm, &procglbnbr);CHKERRQ(ierr);
2249   ierr = MPI_Comm_rank(comm, &proclocnum);CHKERRQ(ierr);
2250   vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
2251   edgelocnbr = xadj[vertlocnbr];
2252 
2253   ierr = SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr);
2254   ierr = SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr);
2255   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
2256   ierr = SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRQ(ierr);
2257   ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
2258   if (tpart) { /* target partition weights */
2259     ierr = SCOTCH_archCmpltw(&archdat, nparts, tpart);CHKERRPTSCOTCH(ierr);
2260   } else {
2261     ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
2262   }
2263   ierr = SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr);
2264 
2265   ierr = SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr);
2266   SCOTCH_dgraphMapExit(&grafdat, &mappdat);
2267   SCOTCH_archExit(&archdat);
2268   SCOTCH_stratExit(&stradat);
2269   SCOTCH_dgraphExit(&grafdat);
2270   PetscFunctionReturn(0);
2271 }
2272 
2273 #endif /* PETSC_HAVE_PTSCOTCH */
2274 
2275 static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
2276 {
2277   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
2278   PetscErrorCode             ierr;
2279 
2280   PetscFunctionBegin;
2281   ierr = MPI_Comm_free(&p->pcomm);CHKERRQ(ierr);
2282   ierr = PetscFree(p);CHKERRQ(ierr);
2283   PetscFunctionReturn(0);
2284 }
2285 
2286 static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer)
2287 {
2288   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
2289   PetscErrorCode            ierr;
2290 
2291   PetscFunctionBegin;
2292   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
2293   ierr = PetscViewerASCIIPrintf(viewer,"using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);CHKERRQ(ierr);
2294   ierr = PetscViewerASCIIPrintf(viewer,"using load imbalance ratio %g\n",(double)p->imbalance);CHKERRQ(ierr);
2295   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
2296   PetscFunctionReturn(0);
2297 }
2298 
2299 static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
2300 {
2301   PetscBool      iascii;
2302   PetscErrorCode ierr;
2303 
2304   PetscFunctionBegin;
2305   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
2306   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
2307   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
2308   if (iascii) {ierr = PetscPartitionerView_PTScotch_Ascii(part, viewer);CHKERRQ(ierr);}
2309   PetscFunctionReturn(0);
2310 }
2311 
2312 static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
2313 {
2314   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
2315   const char *const         *slist = PTScotchStrategyList;
2316   PetscInt                  nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0]));
2317   PetscBool                 flag;
2318   PetscErrorCode            ierr;
2319 
2320   PetscFunctionBegin;
2321   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");CHKERRQ(ierr);
2322   ierr = PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);CHKERRQ(ierr);
2323   ierr = PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);CHKERRQ(ierr);
2324   ierr = PetscOptionsTail();CHKERRQ(ierr);
2325   PetscFunctionReturn(0);
2326 }
2327 
2328 static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
2329 {
2330 #if defined(PETSC_HAVE_PTSCOTCH)
2331   MPI_Comm       comm;
2332   PetscInt       nvtxs = numVertices;   /* The number of vertices in full graph */
2333   PetscInt       *vtxdist;              /* Distribution of vertices across processes */
2334   PetscInt       *xadj   = start;       /* Start of edge list for each vertex */
2335   PetscInt       *adjncy = adjacency;   /* Edge lists for all vertices */
2336   PetscInt       *vwgt   = NULL;        /* Vertex weights */
2337   PetscInt       *adjwgt = NULL;        /* Edge weights */
2338   PetscInt       v, i, *assignment, *points;
2339   PetscMPIInt    size, rank, p;
2340   PetscBool      hasempty = PETSC_FALSE;
2341   PetscInt       *tpwgts = NULL;
2342   PetscErrorCode ierr;
2343 
2344   PetscFunctionBegin;
2345   ierr = PetscObjectGetComm((PetscObject)part,&comm);CHKERRQ(ierr);
2346   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2347   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2348   ierr = PetscMalloc2(size+1,&vtxdist,PetscMax(nvtxs,1),&assignment);CHKERRQ(ierr);
2349   /* Calculate vertex distribution */
2350   vtxdist[0] = 0;
2351   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
2352   for (p = 2; p <= size; ++p) {
2353     hasempty = (PetscBool)(hasempty || !vtxdist[p-1] || !vtxdist[p]);
2354     vtxdist[p] += vtxdist[p-1];
2355   }
2356   /* null graph */
2357   if (vtxdist[size] == 0) {
2358     ierr = PetscFree2(vtxdist, assignment);CHKERRQ(ierr);
2359     ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
2360     PetscFunctionReturn(0);
2361   }
2362 
2363   /* Calculate vertex weights */
2364   if (vertSection) {
2365     ierr = PetscMalloc1(nvtxs,&vwgt);CHKERRQ(ierr);
2366     for (v = 0; v < nvtxs; ++v) {
2367       ierr = PetscSectionGetDof(vertSection, v, &vwgt[v]);CHKERRQ(ierr);
2368     }
2369   }
2370 
2371   /* Calculate partition weights */
2372   if (targetSection) {
2373     PetscInt sumw;
2374 
2375     ierr = PetscCalloc1(nparts,&tpwgts);CHKERRQ(ierr);
2376     for (p = 0, sumw = 0; p < nparts; ++p) {
2377       ierr = PetscSectionGetDof(targetSection,p,&tpwgts[p]);CHKERRQ(ierr);
2378       sumw += tpwgts[p];
2379     }
2380     if (!sumw) {
2381       ierr = PetscFree(tpwgts);CHKERRQ(ierr);
2382     }
2383   }
2384 
2385   {
2386     PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
2387     int                       strat = PTScotch_Strategy(pts->strategy);
2388     double                    imbal = (double)pts->imbalance;
2389 
2390     for (p = 0; !vtxdist[p+1] && p < size; ++p);
2391     if (vtxdist[p+1] == vtxdist[size]) {
2392       if (rank == p) {
2393         ierr = PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, assignment);CHKERRQ(ierr);
2394       }
2395     } else {
2396       PetscInt cnt;
2397       MPI_Comm pcomm;
2398 
2399       if (hasempty) {
2400         ierr = MPI_Comm_split(pts->pcomm,!!nvtxs,rank,&pcomm);CHKERRQ(ierr);
2401         for (p=0,cnt=0;p<size;p++) {
2402           if (vtxdist[p+1] != vtxdist[p]) {
2403             vtxdist[cnt+1] = vtxdist[p+1];
2404             cnt++;
2405           }
2406         }
2407       } else pcomm = pts->pcomm;
2408       if (nvtxs) {
2409         ierr = PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, assignment, pcomm);CHKERRQ(ierr);
2410       }
2411       if (hasempty) {
2412         ierr = MPI_Comm_free(&pcomm);CHKERRQ(ierr);
2413       }
2414     }
2415   }
2416   ierr = PetscFree(vwgt);CHKERRQ(ierr);
2417   ierr = PetscFree(tpwgts);CHKERRQ(ierr);
2418 
2419   /* Convert to PetscSection+IS */
2420   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
2421   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
2422   for (p = 0, i = 0; p < nparts; ++p) {
2423     for (v = 0; v < nvtxs; ++v) {
2424       if (assignment[v] == p) points[i++] = v;
2425     }
2426   }
2427   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
2428   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
2429 
2430   ierr = PetscFree2(vtxdist,assignment);CHKERRQ(ierr);
2431   PetscFunctionReturn(0);
2432 #else
2433   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
2434 #endif
2435 }
2436 
2437 static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
2438 {
2439   PetscFunctionBegin;
2440   part->noGraph             = PETSC_FALSE;
2441   part->ops->view           = PetscPartitionerView_PTScotch;
2442   part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
2443   part->ops->partition      = PetscPartitionerPartition_PTScotch;
2444   part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
2445   PetscFunctionReturn(0);
2446 }
2447 
2448 /*MC
2449   PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library
2450 
2451   Level: intermediate
2452 
2453   Options Database Keys:
2454 +  -petscpartitioner_ptscotch_strategy <string> - PT-Scotch strategy. Choose one of default quality speed balance safety scalability recursive remap
2455 -  -petscpartitioner_ptscotch_imbalance <val> - Load imbalance ratio
2456 
2457   Notes: when the graph is on a single process, this partitioner actually uses Scotch and not PT-Scotch
2458 
2459 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
2460 M*/
2461 
2462 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
2463 {
2464   PetscPartitioner_PTScotch *p;
2465   PetscErrorCode          ierr;
2466 
2467   PetscFunctionBegin;
2468   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
2469   ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
2470   part->data = p;
2471 
2472   ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)part),&p->pcomm);CHKERRQ(ierr);
2473   p->strategy  = 0;
2474   p->imbalance = 0.01;
2475 
2476   ierr = PetscPartitionerInitialize_PTScotch(part);CHKERRQ(ierr);
2477   ierr = PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);CHKERRQ(ierr);
2478   PetscFunctionReturn(0);
2479 }
2480 
2481 /*@
2482   DMPlexGetPartitioner - Get the mesh partitioner
2483 
2484   Not collective
2485 
2486   Input Parameter:
2487 . dm - The DM
2488 
2489   Output Parameter:
2490 . part - The PetscPartitioner
2491 
2492   Level: developer
2493 
2494   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
2495 
.seealso: DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerDMPlexPartition(), PetscPartitionerCreate()
2497 @*/
2498 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
2499 {
2500   DM_Plex       *mesh = (DM_Plex *) dm->data;
2501 
2502   PetscFunctionBegin;
2503   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2504   PetscValidPointer(part, 2);
2505   *part = mesh->partitioner;
2506   PetscFunctionReturn(0);
2507 }
2508 
2509 /*@
2510   DMPlexSetPartitioner - Set the mesh partitioner
2511 
2512   logically collective on DM
2513 
2514   Input Parameters:
2515 + dm - The DM
2516 - part - The partitioner
2517 
2518   Level: developer
2519 
2520   Note: Any existing PetscPartitioner will be destroyed.
2521 
.seealso: DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
2523 @*/
2524 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
2525 {
2526   DM_Plex       *mesh = (DM_Plex *) dm->data;
2527   PetscErrorCode ierr;
2528 
2529   PetscFunctionBegin;
2530   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2531   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
2532   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
2533   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
2534   mesh->partitioner = part;
2535   PetscFunctionReturn(0);
2536 }
2537 
2538 static PetscErrorCode DMPlexAddClosure_Private(DM dm, PetscHSetI ht, PetscInt point)
2539 {
2540   const PetscInt *cone;
2541   PetscInt       coneSize, c;
2542   PetscBool      missing;
2543   PetscErrorCode ierr;
2544 
2545   PetscFunctionBeginHot;
2546   ierr = PetscHSetIQueryAdd(ht, point, &missing);CHKERRQ(ierr);
2547   if (missing) {
2548     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
2549     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
2550     for (c = 0; c < coneSize; c++) {
2551       ierr = DMPlexAddClosure_Private(dm, ht, cone[c]);CHKERRQ(ierr);
2552     }
2553   }
2554   PetscFunctionReturn(0);
2555 }
2556 
2557 PETSC_UNUSED static PetscErrorCode DMPlexAddClosure_Tree(DM dm, PetscHSetI ht, PetscInt point, PetscBool up, PetscBool down)
2558 {
2559   PetscErrorCode ierr;
2560 
2561   PetscFunctionBegin;
2562   if (up) {
2563     PetscInt parent;
2564 
2565     ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
2566     if (parent != point) {
2567       PetscInt closureSize, *closure = NULL, i;
2568 
2569       ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
2570       for (i = 0; i < closureSize; i++) {
2571         PetscInt cpoint = closure[2*i];
2572 
2573         ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
2574         ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
2575       }
2576       ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
2577     }
2578   }
2579   if (down) {
2580     PetscInt numChildren;
2581     const PetscInt *children;
2582 
2583     ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
2584     if (numChildren) {
2585       PetscInt i;
2586 
2587       for (i = 0; i < numChildren; i++) {
2588         PetscInt cpoint = children[i];
2589 
2590         ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
2591         ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
2592       }
2593     }
2594   }
2595   PetscFunctionReturn(0);
2596 }
2597 
2598 static PetscErrorCode DMPlexAddClosureTree_Up_Private(DM dm, PetscHSetI ht, PetscInt point)
2599 {
2600   PetscInt       parent;
2601   PetscErrorCode ierr;
2602 
2603   PetscFunctionBeginHot;
2604   ierr = DMPlexGetTreeParent(dm, point, &parent,NULL);CHKERRQ(ierr);
2605   if (point != parent) {
2606     const PetscInt *cone;
2607     PetscInt       coneSize, c;
2608 
2609     ierr = DMPlexAddClosureTree_Up_Private(dm, ht, parent);CHKERRQ(ierr);
2610     ierr = DMPlexAddClosure_Private(dm, ht, parent);CHKERRQ(ierr);
2611     ierr = DMPlexGetCone(dm, parent, &cone);CHKERRQ(ierr);
2612     ierr = DMPlexGetConeSize(dm, parent, &coneSize);CHKERRQ(ierr);
2613     for (c = 0; c < coneSize; c++) {
2614       const PetscInt cp = cone[c];
2615 
2616       ierr = DMPlexAddClosureTree_Up_Private(dm, ht, cp);CHKERRQ(ierr);
2617     }
2618   }
2619   PetscFunctionReturn(0);
2620 }
2621 
2622 static PetscErrorCode DMPlexAddClosureTree_Down_Private(DM dm, PetscHSetI ht, PetscInt point)
2623 {
2624   PetscInt       i, numChildren;
2625   const PetscInt *children;
2626   PetscErrorCode ierr;
2627 
2628   PetscFunctionBeginHot;
2629   ierr = DMPlexGetTreeChildren(dm, point, &numChildren, &children);CHKERRQ(ierr);
2630   for (i = 0; i < numChildren; i++) {
2631     ierr = PetscHSetIAdd(ht, children[i]);CHKERRQ(ierr);
2632   }
2633   PetscFunctionReturn(0);
2634 }
2635 
2636 static PetscErrorCode DMPlexAddClosureTree_Private(DM dm, PetscHSetI ht, PetscInt point)
2637 {
2638   const PetscInt *cone;
2639   PetscInt       coneSize, c;
2640   PetscErrorCode ierr;
2641 
2642   PetscFunctionBeginHot;
2643   ierr = PetscHSetIAdd(ht, point);CHKERRQ(ierr);
2644   ierr = DMPlexAddClosureTree_Up_Private(dm, ht, point);CHKERRQ(ierr);
2645   ierr = DMPlexAddClosureTree_Down_Private(dm, ht, point);CHKERRQ(ierr);
2646   ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
2647   ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
2648   for (c = 0; c < coneSize; c++) {
2649     ierr = DMPlexAddClosureTree_Private(dm, ht, cone[c]);CHKERRQ(ierr);
2650   }
2651   PetscFunctionReturn(0);
2652 }
2653 
/* Compute the union of the transitive closures of the given points — including
   reference-tree ancestors/children when the mesh has a tree — and return it
   as a sorted IS of unique points (owned by the IS). */
PetscErrorCode DMPlexClosurePoints_Private(DM dm, PetscInt numPoints, const PetscInt points[], IS *closureIS)
{
  DM_Plex         *mesh = (DM_Plex *)dm->data;
  const PetscBool hasTree = (mesh->parentSection || mesh->childSection) ? PETSC_TRUE : PETSC_FALSE;
  PetscInt        nelems, *elems, off = 0, p;
  PetscHSetI      ht;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
  /* Heuristic preallocation: roughly 16 closure points per input point */
  ierr = PetscHSetIResize(ht, numPoints*16);CHKERRQ(ierr);
  if (!hasTree) {
    for (p = 0; p < numPoints; ++p) {
      ierr = DMPlexAddClosure_Private(dm, ht, points[p]);CHKERRQ(ierr);
    }
  } else {
#if 1
    for (p = 0; p < numPoints; ++p) {
      ierr = DMPlexAddClosureTree_Private(dm, ht, points[p]);CHKERRQ(ierr);
    }
#else
    /* Alternative (disabled) implementation using DMPlexGetTransitiveClosure() */
    PetscInt  *closure = NULL, closureSize, c;
    for (p = 0; p < numPoints; ++p) {
      ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      for (c = 0; c < closureSize*2; c += 2) {
        ierr = PetscHSetIAdd(ht, closure[c]);CHKERRQ(ierr);
        if (hasTree) {ierr = DMPlexAddClosure_Tree(dm, ht, closure[c], PETSC_TRUE, PETSC_TRUE);CHKERRQ(ierr);}
      }
    }
    if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure);CHKERRQ(ierr);}
#endif
  }
  /* Flatten the hash set into a sorted array owned by the returned IS */
  ierr = PetscHSetIGetSize(ht, &nelems);CHKERRQ(ierr);
  ierr = PetscMalloc1(nelems, &elems);CHKERRQ(ierr);
  ierr = PetscHSetIGetElems(ht, &off, elems);CHKERRQ(ierr);
  ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
  ierr = PetscSortInt(nelems, elems);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF, nelems, elems, PETSC_OWN_POINTER, closureIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2694 
2695 /*@
2696   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
2697 
2698   Input Parameters:
2699 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2701 
2702   Level: developer
2703 
2704 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2705 @*/
2706 PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
2707 {
2708   IS              rankIS,   pointIS, closureIS;
2709   const PetscInt *ranks,   *points;
2710   PetscInt        numRanks, numPoints, r;
2711   PetscErrorCode  ierr;
2712 
2713   PetscFunctionBegin;
2714   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2715   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2716   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2717   for (r = 0; r < numRanks; ++r) {
2718     const PetscInt rank = ranks[r];
2719     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2720     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2721     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2722     ierr = DMPlexClosurePoints_Private(dm, numPoints, points, &closureIS);CHKERRQ(ierr);
2723     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2724     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2725     ierr = DMLabelSetStratumIS(label, rank, closureIS);CHKERRQ(ierr);
2726     ierr = ISDestroy(&closureIS);CHKERRQ(ierr);
2727   }
2728   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2729   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2730   PetscFunctionReturn(0);
2731 }
2732 
2733 /*@
2734   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
2735 
2736   Input Parameters:
2737 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2739 
2740   Level: developer
2741 
2742 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2743 @*/
2744 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
2745 {
2746   IS              rankIS,   pointIS;
2747   const PetscInt *ranks,   *points;
2748   PetscInt        numRanks, numPoints, r, p, a, adjSize;
2749   PetscInt       *adj = NULL;
2750   PetscErrorCode  ierr;
2751 
2752   PetscFunctionBegin;
2753   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2754   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2755   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2756   for (r = 0; r < numRanks; ++r) {
2757     const PetscInt rank = ranks[r];
2758 
2759     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2760     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2761     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2762     for (p = 0; p < numPoints; ++p) {
2763       adjSize = PETSC_DETERMINE;
2764       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
2765       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
2766     }
2767     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2768     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2769   }
2770   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2771   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2772   ierr = PetscFree(adj);CHKERRQ(ierr);
2773   PetscFunctionReturn(0);
2774 }
2775 
2776 /*@
2777   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
2778 
2779   Input Parameters:
2780 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2782 
2783   Level: developer
2784 
2785   Note: This is required when generating multi-level overlaps to capture
2786   overlap points from non-neighbouring partitions.
2787 
2788 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2789 @*/
2790 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
2791 {
2792   MPI_Comm        comm;
2793   PetscMPIInt     rank;
2794   PetscSF         sfPoint;
2795   DMLabel         lblRoots, lblLeaves;
2796   IS              rankIS, pointIS;
2797   const PetscInt *ranks;
2798   PetscInt        numRanks, r;
2799   PetscErrorCode  ierr;
2800 
2801   PetscFunctionBegin;
2802   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2803   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2804   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
2805   /* Pull point contributions from remote leaves into local roots */
2806   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
2807   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
2808   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2809   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2810   for (r = 0; r < numRanks; ++r) {
2811     const PetscInt remoteRank = ranks[r];
2812     if (remoteRank == rank) continue;
2813     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
2814     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2815     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2816   }
2817   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2818   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2819   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
2820   /* Push point contributions from roots into remote leaves */
2821   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
2822   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
2823   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2824   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2825   for (r = 0; r < numRanks; ++r) {
2826     const PetscInt remoteRank = ranks[r];
2827     if (remoteRank == rank) continue;
2828     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
2829     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2830     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2831   }
2832   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2833   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2834   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
2835   PetscFunctionReturn(0);
2836 }
2837 
2838 /*@
2839   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
2840 
2841   Input Parameters:
2842 + dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
2844 - processSF - A star forest mapping into the local index on each remote rank
2845 
2846   Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots
2848 
2849   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
2850   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
2851 
2852   Level: developer
2853 
2854 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2855 @*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size, r;
  PetscInt           p, n, numNeighbors, numPoints, dof, off, rootSize, l, nleaves, leafSize;
  PetscSF            sfPoint;
  PetscSection       rootSection;
  PetscSFNode       *rootPoints, *leafPoints;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscBool          mpiOverflow = PETSC_FALSE;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_PartLabelInvert,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners */
  /* rootSection is chartered over ranks [0,size): dof on rank r = number of
     points in rootLabel destined for neighbor r, so offsets pack rootPoints
     contiguously per destination rank. */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &rootSize);CHKERRQ(ierr);
  ierr = PetscMalloc1(rootSize, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* If the point is a leaf of the point SF, record its true (remote) owner;
         otherwise this rank owns it and we record (point, rank) directly. */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }

  /* Try to communicate overlap using All-to-All */
  if (!processSF) {
    PetscInt64  counter = 0;
    PetscBool   locOverflow = PETSC_FALSE;
    PetscMPIInt *scounts, *sdispls, *rcounts, *rdispls;

    ierr = PetscCalloc4(size, &scounts, size, &sdispls, size, &rcounts, size, &rdispls);CHKERRQ(ierr);
    for (n = 0; n < numNeighbors; ++n) {
      ierr = PetscSectionGetDof(rootSection, neighbors[n], &dof);CHKERRQ(ierr);
      ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
#if defined(PETSC_USE_64BIT_INDICES)
      /* Alltoallv takes int counts/displacements; with 64-bit PetscInt a local
         count or offset can exceed what an MPI int can carry, in which case we
         fall back to the SF-based path below. */
      if (dof > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
      if (off > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
#endif
      scounts[neighbors[n]] = (PetscMPIInt) dof;
      sdispls[neighbors[n]] = (PetscMPIInt) off;
    }
    /* Exchange per-rank send counts, then build receive displacements; counter
       accumulates in 64 bits so total-receive overflow is also detected. */
    ierr = MPI_Alltoall(scounts, 1, MPI_INT, rcounts, 1, MPI_INT, comm);CHKERRQ(ierr);
    for (r = 0; r < size; ++r) { rdispls[r] = (int)counter; counter += rcounts[r]; }
    if (counter > PETSC_MPI_INT_MAX) locOverflow = PETSC_TRUE;
    /* All ranks must agree on the communication path, hence the logical-OR reduce. */
    ierr = MPI_Allreduce(&locOverflow, &mpiOverflow, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
    if (!mpiOverflow) {
      ierr = PetscInfo(dm,"Using Alltoallv for mesh distribution\n");CHKERRQ(ierr);
      leafSize = (PetscInt) counter;
      ierr = PetscMalloc1(leafSize, &leafPoints);CHKERRQ(ierr);
      ierr = MPI_Alltoallv(rootPoints, scounts, sdispls, MPIU_2INT, leafPoints, rcounts, rdispls, MPIU_2INT, comm);CHKERRQ(ierr);
    }
    ierr = PetscFree4(scounts, sdispls, rcounts, rdispls);CHKERRQ(ierr);
  }

  /* Communicate overlap using process star forest */
  /* Taken when the caller supplied a process SF, or when the Alltoallv path
     above detected an MPI int overflow; DMPlexDistributeData allocates leafPoints. */
  if (processSF || mpiOverflow) {
    PetscSF      procSF;
    PetscSection leafSection;

    if (processSF) {
      ierr = PetscInfo(dm,"Using processSF for mesh distribution\n");CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)processSF);CHKERRQ(ierr);
      procSF = processSF;
    } else {
      ierr = PetscInfo(dm,"Using processSF for mesh distribution (MPI overflow)\n");CHKERRQ(ierr);
      ierr = PetscSFCreate(comm,&procSF);CHKERRQ(ierr);
      ierr = PetscSFSetGraphWithPattern(procSF,NULL,PETSCSF_PATTERN_ALLTOALL);CHKERRQ(ierr);
    }

    ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dm), &leafSection);CHKERRQ(ierr);
    ierr = DMPlexDistributeData(dm, procSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
    ierr = PetscSectionGetStorageSize(leafSection, &leafSize);CHKERRQ(ierr);
    ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&procSF);CHKERRQ(ierr);
  }

  /* Record each received (point, owner-rank) pair in the output label. */
  for (p = 0; p < leafSize; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }

  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_PartLabelInvert,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2973 
2974 /*@
2975   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
2976 
2977   Input Parameters:
+ dm    - The DM
- label - DMLabel assigning ranks to remote roots
2980 
2981   Output Parameter:
2982 . sf    - The star forest communication context encapsulating the defined mapping
2983 
2984   Note: The incoming label is a receiver mapping of remote points to their parent rank.
2985 
2986   Level: developer
2987 
2988 .seealso: DMPlexDistribute(), DMPlexCreateOverlap()
2989 @*/
2990 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
2991 {
2992   PetscMPIInt     rank;
2993   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0, nNeighbors;
2994   PetscSFNode    *remotePoints;
2995   IS              remoteRootIS, neighborsIS;
2996   const PetscInt *remoteRoots, *neighbors;
2997   PetscErrorCode ierr;
2998 
2999   PetscFunctionBegin;
3000   ierr = PetscLogEventBegin(DMPLEX_PartLabelCreateSF,dm,0,0,0);CHKERRQ(ierr);
3001   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
3002 
3003   ierr = DMLabelGetValueIS(label, &neighborsIS);CHKERRQ(ierr);
3004 #if 0
3005   {
3006     IS is;
3007     ierr = ISDuplicate(neighborsIS, &is);CHKERRQ(ierr);
3008     ierr = ISSort(is);CHKERRQ(ierr);
3009     ierr = ISDestroy(&neighborsIS);CHKERRQ(ierr);
3010     neighborsIS = is;
3011   }
3012 #endif
3013   ierr = ISGetLocalSize(neighborsIS, &nNeighbors);CHKERRQ(ierr);
3014   ierr = ISGetIndices(neighborsIS, &neighbors);CHKERRQ(ierr);
3015   for (numRemote = 0, n = 0; n < nNeighbors; ++n) {
3016     ierr = DMLabelGetStratumSize(label, neighbors[n], &numPoints);CHKERRQ(ierr);
3017     numRemote += numPoints;
3018   }
3019   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
3020   /* Put owned points first */
3021   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
3022   if (numPoints > 0) {
3023     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
3024     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
3025     for (p = 0; p < numPoints; p++) {
3026       remotePoints[idx].index = remoteRoots[p];
3027       remotePoints[idx].rank = rank;
3028       idx++;
3029     }
3030     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
3031     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
3032   }
3033   /* Now add remote points */
3034   for (n = 0; n < nNeighbors; ++n) {
3035     const PetscInt nn = neighbors[n];
3036 
3037     ierr = DMLabelGetStratumSize(label, nn, &numPoints);CHKERRQ(ierr);
3038     if (nn == rank || numPoints <= 0) continue;
3039     ierr = DMLabelGetStratumIS(label, nn, &remoteRootIS);CHKERRQ(ierr);
3040     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
3041     for (p = 0; p < numPoints; p++) {
3042       remotePoints[idx].index = remoteRoots[p];
3043       remotePoints[idx].rank = nn;
3044       idx++;
3045     }
3046     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
3047     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
3048   }
3049   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
3050   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
3051   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
3052   ierr = PetscSFSetUp(*sf);CHKERRQ(ierr);
3053   ierr = ISDestroy(&neighborsIS);CHKERRQ(ierr);
3054   ierr = PetscLogEventEnd(DMPLEX_PartLabelCreateSF,dm,0,0,0);CHKERRQ(ierr);
3055   PetscFunctionReturn(0);
3056 }
3057 
3058 /* The two functions below are used by DMPlexRebalanceSharedPoints which errors
3059  * when PETSc is built without ParMETIS. To avoid -Wunused-function, we take
3060  * them out in that case. */
3061 #if defined(PETSC_HAVE_PARMETIS)
3062 /*@C
3063 
3064   DMPlexRewriteSF - Rewrites the ownership of the SF of a DM (in place).
3065 
3066   Input parameters:
3067 + dm                - The DMPlex object.
3068 . n                 - The number of points.
3069 . pointsToRewrite   - The points in the SF whose ownership will change.
3070 . targetOwners      - New owner for each element in pointsToRewrite.
3071 - degrees           - Degrees of the points in the SF as obtained by PetscSFComputeDegreeBegin/PetscSFComputeDegreeEnd.
3072 
3073   Level: developer
3074 
3075 @*/
3076 static PetscErrorCode DMPlexRewriteSF(DM dm, PetscInt n, PetscInt *pointsToRewrite, PetscInt *targetOwners, const PetscInt *degrees)
3077 {
3078   PetscInt      ierr, pStart, pEnd, i, j, counter, leafCounter, sumDegrees, nroots, nleafs;
3079   PetscInt     *cumSumDegrees, *newOwners, *newNumbers, *rankOnLeafs, *locationsOfLeafs, *remoteLocalPointOfLeafs, *points, *leafsNew;
3080   PetscSFNode  *leafLocationsNew;
3081   const         PetscSFNode *iremote;
3082   const         PetscInt *ilocal;
3083   PetscBool    *isLeaf;
3084   PetscSF       sf;
3085   MPI_Comm      comm;
3086   PetscMPIInt   rank, size;
3087 
3088   PetscFunctionBegin;
3089   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
3090   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
3091   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
3092   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
3093 
3094   ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
3095   ierr = PetscSFGetGraph(sf, &nroots, &nleafs, &ilocal, &iremote); CHKERRQ(ierr);
3096   ierr = PetscMalloc1(pEnd-pStart, &isLeaf);CHKERRQ(ierr);
3097   for (i=0; i<pEnd-pStart; i++) {
3098     isLeaf[i] = PETSC_FALSE;
3099   }
3100   for (i=0; i<nleafs; i++) {
3101     isLeaf[ilocal[i]-pStart] = PETSC_TRUE;
3102   }
3103 
3104   ierr = PetscMalloc1(pEnd-pStart+1, &cumSumDegrees);CHKERRQ(ierr);
3105   cumSumDegrees[0] = 0;
3106   for (i=1; i<=pEnd-pStart; i++) {
3107     cumSumDegrees[i] = cumSumDegrees[i-1] + degrees[i-1];
3108   }
3109   sumDegrees = cumSumDegrees[pEnd-pStart];
3110   /* get the location of my leafs (we have sumDegrees many leafs pointing at our roots) */
3111 
3112   ierr = PetscMalloc1(sumDegrees, &locationsOfLeafs);CHKERRQ(ierr);
3113   ierr = PetscMalloc1(pEnd-pStart, &rankOnLeafs);CHKERRQ(ierr);
3114   for (i=0; i<pEnd-pStart; i++) {
3115     rankOnLeafs[i] = rank;
3116   }
3117   ierr = PetscSFGatherBegin(sf, MPIU_INT, rankOnLeafs, locationsOfLeafs);CHKERRQ(ierr);
3118   ierr = PetscSFGatherEnd(sf, MPIU_INT, rankOnLeafs, locationsOfLeafs);CHKERRQ(ierr);
3119   ierr = PetscFree(rankOnLeafs);CHKERRQ(ierr);
3120 
3121   /* get the remote local points of my leaves */
3122   ierr = PetscMalloc1(sumDegrees, &remoteLocalPointOfLeafs);CHKERRQ(ierr);
3123   ierr = PetscMalloc1(pEnd-pStart, &points);CHKERRQ(ierr);
3124   for (i=0; i<pEnd-pStart; i++) {
3125     points[i] = pStart+i;
3126   }
3127   ierr = PetscSFGatherBegin(sf, MPIU_INT, points, remoteLocalPointOfLeafs);CHKERRQ(ierr);
3128   ierr = PetscSFGatherEnd(sf, MPIU_INT, points, remoteLocalPointOfLeafs);CHKERRQ(ierr);
3129   ierr = PetscFree(points);CHKERRQ(ierr);
3130   /* Figure out the new owners of the vertices that are up for grabs and their numbers on the new owners */
3131   ierr = PetscMalloc1(pEnd-pStart, &newOwners);CHKERRQ(ierr);
3132   ierr = PetscMalloc1(pEnd-pStart, &newNumbers);CHKERRQ(ierr);
3133   for (i=0; i<pEnd-pStart; i++) {
3134     newOwners[i] = -1;
3135     newNumbers[i] = -1;
3136   }
3137   {
3138     PetscInt oldNumber, newNumber, oldOwner, newOwner;
3139     for (i=0; i<n; i++) {
3140       oldNumber = pointsToRewrite[i];
3141       newNumber = -1;
3142       oldOwner = rank;
3143       newOwner = targetOwners[i];
3144       if (oldOwner == newOwner) {
3145         newNumber = oldNumber;
3146       } else {
3147         for (j=0; j<degrees[oldNumber]; j++) {
3148           if (locationsOfLeafs[cumSumDegrees[oldNumber]+j] == newOwner) {
3149             newNumber = remoteLocalPointOfLeafs[cumSumDegrees[oldNumber]+j];
3150             break;
3151           }
3152         }
3153       }
3154       if (newNumber == -1) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Couldn't find the new owner of vertex.");
3155 
3156       newOwners[oldNumber] = newOwner;
3157       newNumbers[oldNumber] = newNumber;
3158     }
3159   }
3160   ierr = PetscFree(cumSumDegrees);CHKERRQ(ierr);
3161   ierr = PetscFree(locationsOfLeafs);CHKERRQ(ierr);
3162   ierr = PetscFree(remoteLocalPointOfLeafs);CHKERRQ(ierr);
3163 
3164   ierr = PetscSFBcastBegin(sf, MPIU_INT, newOwners, newOwners);CHKERRQ(ierr);
3165   ierr = PetscSFBcastEnd(sf, MPIU_INT, newOwners, newOwners);CHKERRQ(ierr);
3166   ierr = PetscSFBcastBegin(sf, MPIU_INT, newNumbers, newNumbers);CHKERRQ(ierr);
3167   ierr = PetscSFBcastEnd(sf, MPIU_INT, newNumbers, newNumbers);CHKERRQ(ierr);
3168 
3169   /* Now count how many leafs we have on each processor. */
3170   leafCounter=0;
3171   for (i=0; i<pEnd-pStart; i++) {
3172     if (newOwners[i] >= 0) {
3173       if (newOwners[i] != rank) {
3174         leafCounter++;
3175       }
3176     } else {
3177       if (isLeaf[i]) {
3178         leafCounter++;
3179       }
3180     }
3181   }
3182 
3183   /* Now set up the new sf by creating the leaf arrays */
3184   ierr = PetscMalloc1(leafCounter, &leafsNew);CHKERRQ(ierr);
3185   ierr = PetscMalloc1(leafCounter, &leafLocationsNew);CHKERRQ(ierr);
3186 
3187   leafCounter = 0;
3188   counter = 0;
3189   for (i=0; i<pEnd-pStart; i++) {
3190     if (newOwners[i] >= 0) {
3191       if (newOwners[i] != rank) {
3192         leafsNew[leafCounter] = i;
3193         leafLocationsNew[leafCounter].rank = newOwners[i];
3194         leafLocationsNew[leafCounter].index = newNumbers[i];
3195         leafCounter++;
3196       }
3197     } else {
3198       if (isLeaf[i]) {
3199         leafsNew[leafCounter] = i;
3200         leafLocationsNew[leafCounter].rank = iremote[counter].rank;
3201         leafLocationsNew[leafCounter].index = iremote[counter].index;
3202         leafCounter++;
3203       }
3204     }
3205     if (isLeaf[i]) {
3206       counter++;
3207     }
3208   }
3209 
3210   ierr = PetscSFSetGraph(sf, nroots, leafCounter, leafsNew, PETSC_OWN_POINTER, leafLocationsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
3211   ierr = PetscFree(newOwners);CHKERRQ(ierr);
3212   ierr = PetscFree(newNumbers);CHKERRQ(ierr);
3213   ierr = PetscFree(isLeaf);CHKERRQ(ierr);
3214   PetscFunctionReturn(0);
3215 }
3216 
3217 static PetscErrorCode DMPlexViewDistribution(MPI_Comm comm, PetscInt n, PetscInt skip, PetscInt *vtxwgt, PetscInt *part, PetscViewer viewer)
3218 {
3219   PetscInt *distribution, min, max, sum, i, ierr;
3220   PetscMPIInt rank, size;
3221   PetscFunctionBegin;
3222   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
3223   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
3224   ierr = PetscCalloc1(size, &distribution);CHKERRQ(ierr);
3225   for (i=0; i<n; i++) {
3226     if (part) distribution[part[i]] += vtxwgt[skip*i];
3227     else distribution[rank] += vtxwgt[skip*i];
3228   }
3229   ierr = MPI_Allreduce(MPI_IN_PLACE, distribution, size, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
3230   min = distribution[0];
3231   max = distribution[0];
3232   sum = distribution[0];
3233   for (i=1; i<size; i++) {
3234     if (distribution[i]<min) min=distribution[i];
3235     if (distribution[i]>max) max=distribution[i];
3236     sum += distribution[i];
3237   }
3238   ierr = PetscViewerASCIIPrintf(viewer, "Min: %D, Avg: %D, Max: %D, Balance: %f\n", min, sum/size, max, (max*1.*size)/sum);CHKERRQ(ierr);
3239   ierr = PetscFree(distribution);CHKERRQ(ierr);
3240   PetscFunctionReturn(0);
3241 }
3242 
3243 #endif
3244 
3245 /*@
3246   DMPlexRebalanceSharedPoints - Redistribute points in the plex that are shared in order to achieve better balancing. This routine updates the PointSF of the DM inplace.
3247 
3248   Input parameters:
3249 + dm               - The DMPlex object.
3250 . entityDepth      - depth of the entity to balance (0 -> balance vertices).
3251 . useInitialGuess  - whether to use the current distribution as initial guess (only used by ParMETIS).
3252 - parallel         - whether to use ParMETIS and do the partition in parallel or whether to gather the graph onto a single process and use METIS.
3253 
3254   Output parameters:
3255 . success          - whether the graph partitioning was successful or not. If not, try useInitialGuess=True and parallel=True.
3256 
3257   Level: intermediate
3258 
3259 @*/
3260 
3261 PetscErrorCode DMPlexRebalanceSharedPoints(DM dm, PetscInt entityDepth, PetscBool useInitialGuess, PetscBool parallel, PetscBool *success)
3262 {
3263 #if defined(PETSC_HAVE_PARMETIS)
3264   PetscSF     sf;
3265   PetscInt    ierr, i, j, idx, jdx;
3266   PetscInt    eBegin, eEnd, nroots, nleafs, pStart, pEnd;
3267   const       PetscInt *degrees, *ilocal;
3268   const       PetscSFNode *iremote;
3269   PetscBool   *toBalance, *isLeaf, *isExclusivelyOwned, *isNonExclusivelyOwned;
3270   PetscInt    numExclusivelyOwned, numNonExclusivelyOwned;
3271   PetscMPIInt rank, size;
3272   PetscInt    *globalNumbersOfLocalOwnedVertices, *leafGlobalNumbers;
3273   const       PetscInt *cumSumVertices;
3274   PetscInt    offset, counter;
3275   PetscInt    lenadjncy;
3276   PetscInt    *xadj, *adjncy, *vtxwgt;
3277   PetscInt    lenxadj;
3278   PetscInt    *adjwgt = NULL;
3279   PetscInt    *part, *options;
3280   PetscInt    nparts, wgtflag, numflag, ncon, edgecut;
3281   real_t      *ubvec;
3282   PetscInt    *firstVertices, *renumbering;
3283   PetscInt    failed, failedGlobal;
3284   MPI_Comm    comm;
3285   Mat         A, Apre;
3286   const char *prefix = NULL;
3287   PetscViewer       viewer;
3288   PetscViewerFormat format;
3289   PetscLayout layout;
3290 
3291   PetscFunctionBegin;
3292   if (success) *success = PETSC_FALSE;
3293   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
3294   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
3295   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
3296   if (size==1) PetscFunctionReturn(0);
3297 
3298   ierr = PetscLogEventBegin(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
3299 
3300   ierr = PetscOptionsGetViewer(comm,((PetscObject)dm)->options, prefix,"-dm_rebalance_partition_view",&viewer,&format,NULL);CHKERRQ(ierr);
3301   if (viewer) {
3302     ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr);
3303   }
3304 
3305   /* Figure out all points in the plex that we are interested in balancing. */
3306   ierr = DMPlexGetDepthStratum(dm, entityDepth, &eBegin, &eEnd);CHKERRQ(ierr);
3307   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
3308   ierr = PetscMalloc1(pEnd-pStart, &toBalance);CHKERRQ(ierr);
3309 
3310   for (i=0; i<pEnd-pStart; i++) {
3311     toBalance[i] = (PetscBool)(i-pStart>=eBegin && i-pStart<eEnd);
3312   }
3313 
3314   /* There are three types of points:
3315    * exclusivelyOwned: points that are owned by this process and only seen by this process
3316    * nonExclusivelyOwned: points that are owned by this process but seen by at least another process
3317    * leaf: a point that is seen by this process but owned by a different process
3318    */
3319   ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
3320   ierr = PetscSFGetGraph(sf, &nroots, &nleafs, &ilocal, &iremote); CHKERRQ(ierr);
3321   ierr = PetscMalloc1(pEnd-pStart, &isLeaf);CHKERRQ(ierr);
3322   ierr = PetscMalloc1(pEnd-pStart, &isNonExclusivelyOwned);CHKERRQ(ierr);
3323   ierr = PetscMalloc1(pEnd-pStart, &isExclusivelyOwned);CHKERRQ(ierr);
3324   for (i=0; i<pEnd-pStart; i++) {
3325     isNonExclusivelyOwned[i] = PETSC_FALSE;
3326     isExclusivelyOwned[i] = PETSC_FALSE;
3327     isLeaf[i] = PETSC_FALSE;
3328   }
3329 
3330   /* start by marking all the leafs */
3331   for (i=0; i<nleafs; i++) {
3332     isLeaf[ilocal[i]-pStart] = PETSC_TRUE;
3333   }
3334 
3335   /* for an owned point, we can figure out whether another processor sees it or
3336    * not by calculating its degree */
3337   ierr = PetscSFComputeDegreeBegin(sf, &degrees);CHKERRQ(ierr);
3338   ierr = PetscSFComputeDegreeEnd(sf, &degrees);CHKERRQ(ierr);
3339 
3340   numExclusivelyOwned = 0;
3341   numNonExclusivelyOwned = 0;
3342   for (i=0; i<pEnd-pStart; i++) {
3343     if (toBalance[i]) {
3344       if (degrees[i] > 0) {
3345         isNonExclusivelyOwned[i] = PETSC_TRUE;
3346         numNonExclusivelyOwned += 1;
3347       } else {
3348         if (!isLeaf[i]) {
3349           isExclusivelyOwned[i] = PETSC_TRUE;
3350           numExclusivelyOwned += 1;
3351         }
3352       }
3353     }
3354   }
3355 
3356   /* We are going to build a graph with one vertex per core representing the
3357    * exclusively owned points and then one vertex per nonExclusively owned
3358    * point. */
3359 
3360   ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
3361   ierr = PetscLayoutSetLocalSize(layout, 1 + numNonExclusivelyOwned);CHKERRQ(ierr);
3362   ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
3363   ierr = PetscLayoutGetRanges(layout, &cumSumVertices);CHKERRQ(ierr);
3364 
3365   ierr = PetscMalloc1(pEnd-pStart, &globalNumbersOfLocalOwnedVertices);CHKERRQ(ierr);
3366   for (i=0; i<pEnd-pStart; i++) {globalNumbersOfLocalOwnedVertices[i] = pStart - 1;}
3367   offset = cumSumVertices[rank];
3368   counter = 0;
3369   for (i=0; i<pEnd-pStart; i++) {
3370     if (toBalance[i]) {
3371       if (degrees[i] > 0) {
3372         globalNumbersOfLocalOwnedVertices[i] = counter + 1 + offset;
3373         counter++;
3374       }
3375     }
3376   }
3377 
3378   /* send the global numbers of vertices I own to the leafs so that they know to connect to it */
3379   ierr = PetscMalloc1(pEnd-pStart, &leafGlobalNumbers);CHKERRQ(ierr);
3380   ierr = PetscSFBcastBegin(sf, MPIU_INT, globalNumbersOfLocalOwnedVertices, leafGlobalNumbers);CHKERRQ(ierr);
3381   ierr = PetscSFBcastEnd(sf, MPIU_INT, globalNumbersOfLocalOwnedVertices, leafGlobalNumbers);CHKERRQ(ierr);
3382 
3383   /* Now start building the data structure for ParMETIS */
3384 
3385   ierr = MatCreate(comm, &Apre);CHKERRQ(ierr);
3386   ierr = MatSetType(Apre, MATPREALLOCATOR);CHKERRQ(ierr);
3387   ierr = MatSetSizes(Apre, 1+numNonExclusivelyOwned, 1+numNonExclusivelyOwned, cumSumVertices[size], cumSumVertices[size]);CHKERRQ(ierr);
3388   ierr = MatSetUp(Apre);CHKERRQ(ierr);
3389 
3390   for (i=0; i<pEnd-pStart; i++) {
3391     if (toBalance[i]) {
3392       idx = cumSumVertices[rank];
3393       if (isNonExclusivelyOwned[i]) jdx = globalNumbersOfLocalOwnedVertices[i];
3394       else if (isLeaf[i]) jdx = leafGlobalNumbers[i];
3395       else continue;
3396       ierr = MatSetValue(Apre, idx, jdx, 1, INSERT_VALUES);CHKERRQ(ierr);
3397       ierr = MatSetValue(Apre, jdx, idx, 1, INSERT_VALUES);CHKERRQ(ierr);
3398     }
3399   }
3400 
3401   ierr = MatAssemblyBegin(Apre, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3402   ierr = MatAssemblyEnd(Apre, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3403 
3404   ierr = MatCreate(comm, &A);CHKERRQ(ierr);
3405   ierr = MatSetType(A, MATMPIAIJ);CHKERRQ(ierr);
3406   ierr = MatSetSizes(A, 1+numNonExclusivelyOwned, 1+numNonExclusivelyOwned, cumSumVertices[size], cumSumVertices[size]);CHKERRQ(ierr);
3407   ierr = MatPreallocatorPreallocate(Apre, PETSC_FALSE, A);CHKERRQ(ierr);
3408   ierr = MatDestroy(&Apre);CHKERRQ(ierr);
3409 
3410   for (i=0; i<pEnd-pStart; i++) {
3411     if (toBalance[i]) {
3412       idx = cumSumVertices[rank];
3413       if (isNonExclusivelyOwned[i]) jdx = globalNumbersOfLocalOwnedVertices[i];
3414       else if (isLeaf[i]) jdx = leafGlobalNumbers[i];
3415       else continue;
3416       ierr = MatSetValue(A, idx, jdx, 1, INSERT_VALUES);CHKERRQ(ierr);
3417       ierr = MatSetValue(A, jdx, idx, 1, INSERT_VALUES);CHKERRQ(ierr);
3418     }
3419   }
3420 
3421   ierr = MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3422   ierr = MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3423   ierr = PetscFree(leafGlobalNumbers);CHKERRQ(ierr);
3424   ierr = PetscFree(globalNumbersOfLocalOwnedVertices);CHKERRQ(ierr);
3425 
3426   nparts = size;
3427   wgtflag = 2;
3428   numflag = 0;
3429   ncon = 2;
3430   real_t *tpwgts;
3431   ierr = PetscMalloc1(ncon * nparts, &tpwgts);CHKERRQ(ierr);
3432   for (i=0; i<ncon*nparts; i++) {
3433     tpwgts[i] = 1./(nparts);
3434   }
3435 
3436   ierr = PetscMalloc1(ncon, &ubvec);CHKERRQ(ierr);
3437   ubvec[0] = 1.01;
3438   ubvec[1] = 1.01;
3439   lenadjncy = 0;
3440   for (i=0; i<1+numNonExclusivelyOwned; i++) {
3441     PetscInt temp=0;
3442     ierr = MatGetRow(A, cumSumVertices[rank] + i, &temp, NULL, NULL);CHKERRQ(ierr);
3443     lenadjncy += temp;
3444     ierr = MatRestoreRow(A, cumSumVertices[rank] + i, &temp, NULL, NULL);CHKERRQ(ierr);
3445   }
3446   ierr = PetscMalloc1(lenadjncy, &adjncy);CHKERRQ(ierr);
3447   lenxadj = 2 + numNonExclusivelyOwned;
3448   ierr = PetscMalloc1(lenxadj, &xadj);CHKERRQ(ierr);
3449   xadj[0] = 0;
3450   counter = 0;
3451   for (i=0; i<1+numNonExclusivelyOwned; i++) {
3452     PetscInt        temp=0;
3453     const PetscInt *cols;
3454     ierr = MatGetRow(A, cumSumVertices[rank] + i, &temp, &cols, NULL);CHKERRQ(ierr);
3455     ierr = PetscArraycpy(&adjncy[counter], cols, temp);CHKERRQ(ierr);
3456     counter += temp;
3457     xadj[i+1] = counter;
3458     ierr = MatRestoreRow(A, cumSumVertices[rank] + i, &temp, &cols, NULL);CHKERRQ(ierr);
3459   }
3460 
3461   ierr = PetscMalloc1(cumSumVertices[rank+1]-cumSumVertices[rank], &part);CHKERRQ(ierr);
3462   ierr = PetscMalloc1(ncon*(1 + numNonExclusivelyOwned), &vtxwgt);CHKERRQ(ierr);
3463   vtxwgt[0] = numExclusivelyOwned;
3464   if (ncon>1) vtxwgt[1] = 1;
3465   for (i=0; i<numNonExclusivelyOwned; i++) {
3466     vtxwgt[ncon*(i+1)] = 1;
3467     if (ncon>1) vtxwgt[ncon*(i+1)+1] = 0;
3468   }
3469 
3470   if (viewer) {
3471     ierr = PetscViewerASCIIPrintf(viewer, "Attempt rebalancing of shared points of depth %D on interface of mesh distribution.\n", entityDepth);CHKERRQ(ierr);
3472     ierr = PetscViewerASCIIPrintf(viewer, "Size of generated auxiliary graph: %D\n", cumSumVertices[size]);CHKERRQ(ierr);
3473   }
3474   if (parallel) {
3475     ierr = PetscMalloc1(4, &options);CHKERRQ(ierr);
3476     options[0] = 1;
3477     options[1] = 0; /* Verbosity */
3478     options[2] = 0; /* Seed */
3479     options[3] = PARMETIS_PSR_COUPLED; /* Seed */
3480     if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using ParMETIS to partition graph.\n");CHKERRQ(ierr); }
3481     if (useInitialGuess) {
3482       if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using current distribution of points as initial guess.\n");CHKERRQ(ierr); }
3483       PetscStackPush("ParMETIS_V3_RefineKway");
3484       ierr = ParMETIS_V3_RefineKway((PetscInt*)cumSumVertices, xadj, adjncy, vtxwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgecut, part, &comm);
3485       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_RefineKway()");
3486       PetscStackPop;
3487     } else {
3488       PetscStackPush("ParMETIS_V3_PartKway");
3489       ierr = ParMETIS_V3_PartKway((PetscInt*)cumSumVertices, xadj, adjncy, vtxwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgecut, part, &comm);
3490       PetscStackPop;
3491       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
3492     }
3493     ierr = PetscFree(options);CHKERRQ(ierr);
3494   } else {
3495     if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using METIS to partition graph.\n");CHKERRQ(ierr); }
3496     Mat As;
3497     PetscInt numRows;
3498     PetscInt *partGlobal;
3499     ierr = MatCreateRedundantMatrix(A, size, MPI_COMM_NULL, MAT_INITIAL_MATRIX, &As);CHKERRQ(ierr);
3500 
3501     PetscInt *numExclusivelyOwnedAll;
3502     ierr = PetscMalloc1(size, &numExclusivelyOwnedAll);CHKERRQ(ierr);
3503     numExclusivelyOwnedAll[rank] = numExclusivelyOwned;
3504     ierr = MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,numExclusivelyOwnedAll,1,MPIU_INT,comm);CHKERRQ(ierr);
3505 
3506     ierr = MatGetSize(As, &numRows, NULL);CHKERRQ(ierr);
3507     ierr = PetscMalloc1(numRows, &partGlobal);CHKERRQ(ierr);
3508     if (!rank) {
3509       PetscInt *adjncy_g, *xadj_g, *vtxwgt_g;
3510       lenadjncy = 0;
3511 
3512       for (i=0; i<numRows; i++) {
3513         PetscInt temp=0;
3514         ierr = MatGetRow(As, i, &temp, NULL, NULL);CHKERRQ(ierr);
3515         lenadjncy += temp;
3516         ierr = MatRestoreRow(As, i, &temp, NULL, NULL);CHKERRQ(ierr);
3517       }
3518       ierr = PetscMalloc1(lenadjncy, &adjncy_g);CHKERRQ(ierr);
3519       lenxadj = 1 + numRows;
3520       ierr = PetscMalloc1(lenxadj, &xadj_g);CHKERRQ(ierr);
3521       xadj_g[0] = 0;
3522       counter = 0;
3523       for (i=0; i<numRows; i++) {
3524         PetscInt        temp=0;
3525         const PetscInt *cols;
3526         ierr = MatGetRow(As, i, &temp, &cols, NULL);CHKERRQ(ierr);
3527         ierr = PetscArraycpy(&adjncy_g[counter], cols, temp);CHKERRQ(ierr);
3528         counter += temp;
3529         xadj_g[i+1] = counter;
3530         ierr = MatRestoreRow(As, i, &temp, &cols, NULL);CHKERRQ(ierr);
3531       }
3532       ierr = PetscMalloc1(2*numRows, &vtxwgt_g);CHKERRQ(ierr);
3533       for (i=0; i<size; i++){
3534         vtxwgt_g[ncon*cumSumVertices[i]] = numExclusivelyOwnedAll[i];
3535         if (ncon>1) vtxwgt_g[ncon*cumSumVertices[i]+1] = 1;
3536         for (j=cumSumVertices[i]+1; j<cumSumVertices[i+1]; j++) {
3537           vtxwgt_g[ncon*j] = 1;
3538           if (ncon>1) vtxwgt_g[2*j+1] = 0;
3539         }
3540       }
3541       ierr = PetscMalloc1(64, &options);CHKERRQ(ierr);
3542       ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
3543       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
3544       options[METIS_OPTION_CONTIG] = 1;
3545       PetscStackPush("METIS_PartGraphKway");
3546       ierr = METIS_PartGraphKway(&numRows, &ncon, xadj_g, adjncy_g, vtxwgt_g, NULL, NULL, &nparts, tpwgts, ubvec, options, &edgecut, partGlobal);
3547       PetscStackPop;
3548       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
3549       ierr = PetscFree(options);CHKERRQ(ierr);
3550       ierr = PetscFree(xadj_g);CHKERRQ(ierr);
3551       ierr = PetscFree(adjncy_g);CHKERRQ(ierr);
3552       ierr = PetscFree(vtxwgt_g);CHKERRQ(ierr);
3553     }
3554     ierr = PetscFree(numExclusivelyOwnedAll);CHKERRQ(ierr);
3555 
3556     /* Now scatter the parts array. */
3557     {
3558       PetscMPIInt *counts, *mpiCumSumVertices;
3559       ierr = PetscMalloc1(size, &counts);CHKERRQ(ierr);
3560       ierr = PetscMalloc1(size+1, &mpiCumSumVertices);CHKERRQ(ierr);
3561       for(i=0; i<size; i++) {
3562         ierr = PetscMPIIntCast(cumSumVertices[i+1] - cumSumVertices[i], &(counts[i]));CHKERRQ(ierr);
3563       }
3564       for(i=0; i<=size; i++) {
3565         ierr = PetscMPIIntCast(cumSumVertices[i], &(mpiCumSumVertices[i]));CHKERRQ(ierr);
3566       }
3567       ierr = MPI_Scatterv(partGlobal, counts, mpiCumSumVertices, MPIU_INT, part, counts[rank], MPIU_INT, 0, comm);CHKERRQ(ierr);
3568       ierr = PetscFree(counts);CHKERRQ(ierr);
3569       ierr = PetscFree(mpiCumSumVertices);CHKERRQ(ierr);
3570     }
3571 
3572     ierr = PetscFree(partGlobal);CHKERRQ(ierr);
3573     ierr = MatDestroy(&As);CHKERRQ(ierr);
3574   }
3575 
3576   ierr = MatDestroy(&A);CHKERRQ(ierr);
3577   ierr = PetscFree(ubvec);CHKERRQ(ierr);
3578   ierr = PetscFree(tpwgts);CHKERRQ(ierr);
3579 
3580   /* Now rename the result so that the vertex resembling the exclusively owned points stays on the same rank */
3581 
3582   ierr = PetscMalloc1(size, &firstVertices);CHKERRQ(ierr);
3583   ierr = PetscMalloc1(size, &renumbering);CHKERRQ(ierr);
3584   firstVertices[rank] = part[0];
3585   ierr = MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,firstVertices,1,MPIU_INT,comm);CHKERRQ(ierr);
3586   for (i=0; i<size; i++) {
3587     renumbering[firstVertices[i]] = i;
3588   }
3589   for (i=0; i<cumSumVertices[rank+1]-cumSumVertices[rank]; i++) {
3590     part[i] = renumbering[part[i]];
3591   }
3592   /* Check if the renumbering worked (this can fail when ParMETIS gives fewer partitions than there are processes) */
3593   failed = (PetscInt)(part[0] != rank);
3594   ierr = MPI_Allreduce(&failed, &failedGlobal, 1, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
3595 
3596   ierr = PetscFree(firstVertices);CHKERRQ(ierr);
3597   ierr = PetscFree(renumbering);CHKERRQ(ierr);
3598 
3599   if (failedGlobal > 0) {
3600     ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
3601     ierr = PetscFree(xadj);CHKERRQ(ierr);
3602     ierr = PetscFree(adjncy);CHKERRQ(ierr);
3603     ierr = PetscFree(vtxwgt);CHKERRQ(ierr);
3604     ierr = PetscFree(toBalance);CHKERRQ(ierr);
3605     ierr = PetscFree(isLeaf);CHKERRQ(ierr);
3606     ierr = PetscFree(isNonExclusivelyOwned);CHKERRQ(ierr);
3607     ierr = PetscFree(isExclusivelyOwned);CHKERRQ(ierr);
3608     ierr = PetscFree(part);CHKERRQ(ierr);
3609     if (viewer) {
3610       ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
3611       ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
3612     }
3613     ierr = PetscLogEventEnd(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
3614     PetscFunctionReturn(0);
3615   }
3616 
3617   /*Let's check how well we did distributing points*/
3618   if (viewer) {
3619     ierr = PetscViewerASCIIPrintf(viewer, "Comparing number of owned entities of depth %D on each process before rebalancing, after rebalancing, and after consistency checks.\n", entityDepth);CHKERRQ(ierr);
3620     ierr = PetscViewerASCIIPrintf(viewer, "Initial.     ");CHKERRQ(ierr);
3621     ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, NULL, viewer);CHKERRQ(ierr);
3622     ierr = PetscViewerASCIIPrintf(viewer, "Rebalanced.  ");CHKERRQ(ierr);
3623     ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, part, viewer);CHKERRQ(ierr);
3624   }
3625 
3626   /* Now check that every vertex is owned by a process that it is actually connected to. */
3627   for (i=1; i<=numNonExclusivelyOwned; i++) {
3628     PetscInt loc = 0;
3629     ierr = PetscFindInt(cumSumVertices[part[i]], xadj[i+1]-xadj[i], &adjncy[xadj[i]], &loc);CHKERRQ(ierr);
3630     /* If not, then just set the owner to the original owner (hopefully a rare event, it means that a vertex has been isolated) */
3631     if (loc<0) {
3632       part[i] = rank;
3633     }
3634   }
3635 
3636   /* Let's see how significant the influences of the previous fixing up step was.*/
3637   if (viewer) {
3638     ierr = PetscViewerASCIIPrintf(viewer, "After.       ");CHKERRQ(ierr);
3639     ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, part, viewer);CHKERRQ(ierr);
3640   }
3641 
3642   ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
3643   ierr = PetscFree(xadj);CHKERRQ(ierr);
3644   ierr = PetscFree(adjncy);CHKERRQ(ierr);
3645   ierr = PetscFree(vtxwgt);CHKERRQ(ierr);
3646 
3647   /* Almost done, now rewrite the SF to reflect the new ownership. */
3648   {
3649     PetscInt *pointsToRewrite;
3650     ierr = PetscMalloc1(numNonExclusivelyOwned, &pointsToRewrite);CHKERRQ(ierr);
3651     counter = 0;
3652     for(i=0; i<pEnd-pStart; i++) {
3653       if (toBalance[i]) {
3654         if (isNonExclusivelyOwned[i]) {
3655           pointsToRewrite[counter] = i + pStart;
3656           counter++;
3657         }
3658       }
3659     }
3660     ierr = DMPlexRewriteSF(dm, numNonExclusivelyOwned, pointsToRewrite, part+1, degrees);CHKERRQ(ierr);
3661     ierr = PetscFree(pointsToRewrite);CHKERRQ(ierr);
3662   }
3663 
3664   ierr = PetscFree(toBalance);CHKERRQ(ierr);
3665   ierr = PetscFree(isLeaf);CHKERRQ(ierr);
3666   ierr = PetscFree(isNonExclusivelyOwned);CHKERRQ(ierr);
3667   ierr = PetscFree(isExclusivelyOwned);CHKERRQ(ierr);
3668   ierr = PetscFree(part);CHKERRQ(ierr);
3669   if (viewer) {
3670     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
3671     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
3672   }
3673   if (success) *success = PETSC_TRUE;
3674   ierr = PetscLogEventEnd(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
3675 #else
3676   SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
3677 #endif
3678   PetscFunctionReturn(0);
3679 }
3680