xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 76bd3646d776ac16fc835ea742fa097f15759704)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/hashseti.h>
3 
/* Class id for PetscPartitioner objects; presumably registered during package initialization -- TODO confirm (registration not visible in this chunk) */
PetscClassId PETSCPARTITIONER_CLASSID = 0;

/* Global registry of PetscPartitioner implementations, populated via PetscPartitionerRegister() */
PetscFunctionList PetscPartitionerList              = NULL;
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
8 
9 PetscBool ChacoPartitionercite = PETSC_FALSE;
10 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
11                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
12                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
13                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
14                                "  isbn      = {0-89791-816-9},\n"
15                                "  pages     = {28},\n"
16                                "  doi       = {https://doi.acm.org/10.1145/224170.224228},\n"
17                                "  publisher = {ACM Press},\n"
18                                "  address   = {New York},\n"
19                                "  year      = {1995}\n}\n";
20 
/* BibTeX citation for the ParMetis partitioner; the flag records whether it has been registered yet */
PetscBool ParMetisPartitionercite = PETSC_FALSE;
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";
29 
/* BibTeX citation for the PT-Scotch partitioner; the flag records whether it has been registered yet */
PetscBool PTScotchPartitionercite = PETSC_FALSE;
const char PTScotchPartitionerCitation[] =
  "@article{PTSCOTCH,\n"
  "  author  = {C. Chevalier and F. Pellegrini},\n"
  "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
  "  journal = {Parallel Computing},\n"
  "  volume  = {34},\n"
  "  number  = {6},\n"
  "  pages   = {318--331},\n"
  "  year    = {2008},\n"
  "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
  "}\n";
42 
43 
44 PETSC_STATIC_INLINE PetscInt DMPlex_GlobalID(PetscInt point) { return point >= 0 ? point : -(point+1); }
45 
/* Build the cell-cell adjacency graph (CSR) for a partitioner by walking the mesh directly.

   Input:
     dm     - The DMPlex
     height - Stratum height of the points treated as graph vertices (0 = cells)
   Output:
     numVertices     - Number of locally owned vertices in the graph
     offsets         - CSR row offsets (length *numVertices+1), freed with PetscFree()
     adjacency       - CSR column indices in the global cell numbering, freed with PetscFree()
     globalNumbering - Optional global cell numbering; negative entries encode unowned copies

   Uninterpolated meshes (dim != depth) are delegated to DMPlexCreateNeighborCSR(), which
   only works sequentially. In parallel, adjacency across process boundaries is recovered by
   exchanging cell ids over the point SF (broadcast roots->leaves, then reduce leaves->roots). */
static PetscErrorCode DMPlexCreatePartitionerGraph_Native(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
{
  PetscInt       dim, depth, p, pStart, pEnd, a, adjSize, idx, size;
  PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
  IS             cellNumbering;
  const PetscInt *cellNum;
  PetscBool      useCone, useClosure;
  PetscSection   section;
  PetscSegBuffer adjBuffer;
  PetscSF        sfPoint;
  PetscInt       *adjCells = NULL, *remoteCells = NULL;
  const PetscInt *local;
  PetscInt       nroots, nleaves, l;
  PetscMPIInt    rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  if (dim != depth) {
    /* We do not handle the uninterpolated case here */
    ierr = DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);CHKERRQ(ierr);
    /* DMPlexCreateNeighborCSR does not make a numbering */
    if (globalNumbering) {ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);CHKERRQ(ierr);}
    /* Different behavior for empty graphs */
    if (!*numVertices) {
      ierr = PetscMalloc1(1, offsets);CHKERRQ(ierr);
      (*offsets)[0] = 0;
    }
    /* Broken in parallel */
    if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported");
    PetscFunctionReturn(0);
  }
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
  /* Build adjacency graph via a section/segbuffer */
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
  /* Always use FVM adjacency to create partitioner graph */
  ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
  ierr = DMSetBasicAdjacency(dm, PETSC_TRUE, PETSC_FALSE);CHKERRQ(ierr);
  ierr = DMPlexCreateNumbering_Plex(dm, pStart, pEnd, 0, NULL, sfPoint, &cellNumbering);CHKERRQ(ierr);
  if (globalNumbering) {
    /* Hand the caller its own reference to the numbering; we keep using it below */
    ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
    *globalNumbering = cellNumbering;
  }
  ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  /* For all boundary faces (including faces adjacent to a ghost cell), record the local cell in adjCells
     Broadcast adjCells to remoteCells (to get cells from roots) and Reduce adjCells to remoteCells (to get cells from leaves)
   */
  ierr = PetscSFGetGraph(sfPoint, &nroots, &nleaves, &local, NULL);CHKERRQ(ierr);
  if (nroots >= 0) {
    PetscInt fStart, fEnd, f;

    /* adjCells[f]: global id of an owned cell touching face f; -3 marks "no cell recorded" */
    ierr = PetscCalloc2(nroots, &adjCells, nroots, &remoteCells);CHKERRQ(ierr);
    ierr = DMPlexGetHeightStratum(dm, height+1, &fStart, &fEnd);CHKERRQ(ierr);
    for (l = 0; l < nroots; ++l) adjCells[l] = -3;
    for (f = fStart; f < fEnd; ++f) {
      const PetscInt *support;
      PetscInt        supportSize;

      ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
      ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
      if (supportSize == 1) adjCells[f] = DMPlex_GlobalID(cellNum[support[0]]);
      else if (supportSize == 2) {
        /* If one support cell is an SF leaf (remote copy), record the OTHER (local) cell for the remote owner */
        ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = DMPlex_GlobalID(cellNum[support[1]]);
        ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = DMPlex_GlobalID(cellNum[support[0]]);
      }
      /* Handle non-conforming meshes */
      if (supportSize > 2) {
        PetscInt        numChildren, i;
        const PetscInt *children;

        /* Apply the same supportSize 1/2 logic to each child face that lies in the face stratum */
        ierr = DMPlexGetTreeChildren(dm, f, &numChildren, &children);CHKERRQ(ierr);
        for (i = 0; i < numChildren; ++i) {
          const PetscInt child = children[i];
          if (fStart <= child && child < fEnd) {
            ierr = DMPlexGetSupport(dm, child, &support);CHKERRQ(ierr);
            ierr = DMPlexGetSupportSize(dm, child, &supportSize);CHKERRQ(ierr);
            if (supportSize == 1) adjCells[child] = DMPlex_GlobalID(cellNum[support[0]]);
            else if (supportSize == 2) {
              ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr);
              if (p >= 0) adjCells[child] = DMPlex_GlobalID(cellNum[support[1]]);
              ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr);
              if (p >= 0) adjCells[child] = DMPlex_GlobalID(cellNum[support[0]]);
            }
          }
        }
      }
    }
    /* Exchange both ways so every rank sees the neighboring cell across each shared face; MPI_MAX keeps the valid (>= 0) id */
    for (l = 0; l < nroots; ++l) remoteCells[l] = -1;
    ierr = PetscSFBcastBegin(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
  }
  /* Combine local and global adjacencies */
  for (*numVertices = 0, p = pStart; p < pEnd; p++) {
    /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    /* Add remote cells */
    if (remoteCells) {
      const PetscInt gp = DMPlex_GlobalID(cellNum[p]);
      PetscInt       coneSize, numChildren, c, i;
      const PetscInt *cone, *children;

      ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
      ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
      for (c = 0; c < coneSize; ++c) {
        const PetscInt point = cone[c];
        /* A valid remote id different from this cell means a neighbor across the process boundary */
        if (remoteCells[point] >= 0 && remoteCells[point] != gp) {
          PetscInt *PETSC_RESTRICT pBuf;
          ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
          ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
          *pBuf = remoteCells[point];
        }
        /* Handle non-conforming meshes */
        ierr = DMPlexGetTreeChildren(dm, point, &numChildren, &children);CHKERRQ(ierr);
        for (i = 0; i < numChildren; ++i) {
          const PetscInt child = children[i];
          if (remoteCells[child] >= 0 && remoteCells[child] != gp) {
            PetscInt *PETSC_RESTRICT pBuf;
            ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
            ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
            *pBuf = remoteCells[child];
          }
        }
      }
    }
    /* Add local cells */
    adjSize = PETSC_DETERMINE;
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      const PetscInt point = adj[a];
      if (point != p && pStart <= point && point < pEnd) {
        PetscInt *PETSC_RESTRICT pBuf;
        ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
        ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
        *pBuf = DMPlex_GlobalID(cellNum[point]);
      }
    }
    (*numVertices)++;
  }
  ierr = PetscFree(adj);CHKERRQ(ierr);
  ierr = PetscFree2(adjCells, remoteCells);CHKERRQ(ierr);
  /* Restore the user's adjacency definition */
  ierr = DMSetBasicAdjacency(dm, useCone, useClosure);CHKERRQ(ierr);

  /* Derive CSR graph from section/segbuffer */
  ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
  for (idx = 0, p = pStart; p < pEnd; p++) {
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
  }
  vOffsets[*numVertices] = size;
  ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);

  if (nroots >= 0) {
    /* Filter out duplicate edges using section/segbuffer */
    ierr = PetscSectionReset(section);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(section, 0, *numVertices);CHKERRQ(ierr);
    for (p = 0; p < *numVertices; p++) {
      PetscInt start = vOffsets[p], end = vOffsets[p+1];
      PetscInt numEdges = end-start, *PETSC_RESTRICT edges;
      /* Sort+unique each row in place, then copy the surviving edges into a fresh segbuffer */
      ierr = PetscSortRemoveDupsInt(&numEdges, &graph[start]);CHKERRQ(ierr);
      ierr = PetscSectionSetDof(section, p, numEdges);CHKERRQ(ierr);
      ierr = PetscSegBufferGetInts(adjBuffer, numEdges, &edges);CHKERRQ(ierr);
      ierr = PetscArraycpy(edges, &graph[start], numEdges);CHKERRQ(ierr);
    }
    ierr = PetscFree(vOffsets);CHKERRQ(ierr);
    ierr = PetscFree(graph);CHKERRQ(ierr);
    /* Derive CSR graph from section/segbuffer */
    ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
    ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
    ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
    for (idx = 0, p = 0; p < *numVertices; p++) {
      ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
    }
    vOffsets[*numVertices] = size;
    ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
  } else {
    /* Sort adjacencies (not strictly necessary) */
    for (p = 0; p < *numVertices; p++) {
      PetscInt start = vOffsets[p], end = vOffsets[p+1];
      ierr = PetscSortInt(end-start, &graph[start]);CHKERRQ(ierr);
    }
  }

  if (offsets) *offsets = vOffsets;
  if (adjacency) *adjacency = graph;

  /* Cleanup */
  ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
  ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
249 
/* Build the cell-cell adjacency graph (CSR) for a partitioner by assembling a parallel
   face-cell connectivity matrix and computing conn^T * conn.

   Input:
     dm     - The DMPlex
     height - Stratum height of the points treated as graph vertices (0 = cells)
   Output:
     numVertices     - Number of locally owned vertices in the graph
     offsets         - CSR row offsets with self-connectivity removed, freed with PetscFree()
     adjacency       - CSR column indices in the global cell numbering, freed with PetscFree()
     globalNumbering - Optional global cell numbering; negative entries encode unowned copies

   Uninterpolated meshes (dim != depth) are delegated to DMPlexCreateNeighborCSR(), which
   only works sequentially. */
static PetscErrorCode DMPlexCreatePartitionerGraph_ViaMat(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
{
  Mat            conn, CSR;
  IS             fis, cis, cis_own;
  PetscSF        sfPoint;
  const PetscInt *rows, *cols, *ii, *jj;
  PetscInt       *idxs,*idxs2;
  PetscInt       dim, depth, floc, cloc, i, M, N, c, lm, m, cStart, cEnd, fStart, fEnd;
  PetscMPIInt    rank;
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  if (dim != depth) {
    /* We do not handle the uninterpolated case here */
    ierr = DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);CHKERRQ(ierr);
    /* DMPlexCreateNeighborCSR does not make a numbering */
    if (globalNumbering) {ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);CHKERRQ(ierr);}
    /* Different behavior for empty graphs */
    if (!*numVertices) {
      ierr = PetscMalloc1(1, offsets);CHKERRQ(ierr);
      (*offsets)[0] = 0;
    }
    /* Broken in parallel */
    if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported");
    PetscFunctionReturn(0);
  }
  /* Interpolated and parallel case */

  /* numbering */
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &cStart, &cEnd);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height+1, &fStart, &fEnd);CHKERRQ(ierr);
  ierr = DMPlexCreateNumbering_Plex(dm, cStart, cEnd, 0, &N, sfPoint, &cis);CHKERRQ(ierr);
  ierr = DMPlexCreateNumbering_Plex(dm, fStart, fEnd, 0, &M, sfPoint, &fis);CHKERRQ(ierr);
  if (globalNumbering) {
    ierr = ISDuplicate(cis, globalNumbering);CHKERRQ(ierr);
  }

  /* get positive global ids and local sizes for facets and cells */
  /* Unowned points are stored as -(id+1) in the numbering; decode them and count owned facets (floc) */
  ierr = ISGetLocalSize(fis, &m);CHKERRQ(ierr);
  ierr = ISGetIndices(fis, &rows);CHKERRQ(ierr);
  ierr = PetscMalloc1(m, &idxs);CHKERRQ(ierr);
  for (i = 0, floc = 0; i < m; i++) {
    const PetscInt p = rows[i];

    if (p < 0) {
      idxs[i] = -(p+1);
    } else {
      idxs[i] = p;
      floc   += 1;
    }
  }
  ierr = ISRestoreIndices(fis, &rows);CHKERRQ(ierr);
  ierr = ISDestroy(&fis);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF, m, idxs, PETSC_OWN_POINTER, &fis);CHKERRQ(ierr);

  /* Same decoding for cells; cis_own collects the global ids of owned cells only */
  ierr = ISGetLocalSize(cis, &m);CHKERRQ(ierr);
  ierr = ISGetIndices(cis, &cols);CHKERRQ(ierr);
  ierr = PetscMalloc1(m, &idxs);CHKERRQ(ierr);
  ierr = PetscMalloc1(m, &idxs2);CHKERRQ(ierr);
  for (i = 0, cloc = 0; i < m; i++) {
    const PetscInt p = cols[i];

    if (p < 0) {
      idxs[i] = -(p+1);
    } else {
      idxs[i]       = p;
      idxs2[cloc++] = p;
    }
  }
  ierr = ISRestoreIndices(cis, &cols);CHKERRQ(ierr);
  ierr = ISDestroy(&cis);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), m, idxs, PETSC_OWN_POINTER, &cis);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), cloc, idxs2, PETSC_OWN_POINTER, &cis_own);CHKERRQ(ierr);

  /* Create matrix to hold F-C connectivity (MatMatTranspose Mult not supported for MPIAIJ) */
  ierr = MatCreate(PetscObjectComm((PetscObject)dm), &conn);CHKERRQ(ierr);
  ierr = MatSetSizes(conn, floc, cloc, M, N);CHKERRQ(ierr);
  ierr = MatSetType(conn, MATMPIAIJ);CHKERRQ(ierr);
  ierr = DMPlexGetMaxSizes(dm, NULL, &lm);CHKERRQ(ierr);
  /* NOTE(review): the per-row preallocation sums the max support size over all ranks
     (MPI_SUM); this looks like a deliberate overestimate but MPI_MAX would seem
     sufficient -- confirm intent before changing */
  ierr = MPI_Allreduce(&lm, &m, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject) dm));CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(conn, m, NULL, m, NULL);CHKERRQ(ierr);

  /* Assemble matrix: entry (f, c) = 1 when face f is in the cone of cell c */
  ierr = ISGetIndices(fis, &rows);CHKERRQ(ierr);
  ierr = ISGetIndices(cis, &cols);CHKERRQ(ierr);
  for (c = cStart; c < cEnd; c++) {
    const PetscInt *cone;
    PetscInt        coneSize, row, col, f;

    col  = cols[c-cStart];
    ierr = DMPlexGetCone(dm, c, &cone);CHKERRQ(ierr);
    ierr = DMPlexGetConeSize(dm, c, &coneSize);CHKERRQ(ierr);
    for (f = 0; f < coneSize; f++) {
      const PetscScalar v = 1.0;
      const PetscInt *children;
      PetscInt        numChildren, ch;

      row  = rows[cone[f]-fStart];
      ierr = MatSetValues(conn, 1, &row, 1, &col, &v, INSERT_VALUES);CHKERRQ(ierr);

      /* non-conforming meshes */
      ierr = DMPlexGetTreeChildren(dm, cone[f], &numChildren, &children);CHKERRQ(ierr);
      for (ch = 0; ch < numChildren; ch++) {
        const PetscInt child = children[ch];

        if (child < fStart || child >= fEnd) continue;
        row  = rows[child-fStart];
        ierr = MatSetValues(conn, 1, &row, 1, &col, &v, INSERT_VALUES);CHKERRQ(ierr);
      }
    }
  }
  ierr = ISRestoreIndices(fis, &rows);CHKERRQ(ierr);
  ierr = ISRestoreIndices(cis, &cols);CHKERRQ(ierr);
  ierr = ISDestroy(&fis);CHKERRQ(ierr);
  ierr = ISDestroy(&cis);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(conn, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(conn, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  /* Get parallel CSR by doing conn^T * conn */
  ierr = MatTransposeMatMult(conn, conn, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &CSR);CHKERRQ(ierr);
  ierr = MatDestroy(&conn);CHKERRQ(ierr);

  /* extract local part of the CSR */
  ierr = MatMPIAIJGetLocalMat(CSR, MAT_INITIAL_MATRIX, &conn);CHKERRQ(ierr);
  ierr = MatDestroy(&CSR);CHKERRQ(ierr);
  ierr = MatGetRowIJ(conn, 0, PETSC_FALSE, PETSC_FALSE, &m, &ii, &jj, &flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "No IJ format");

  /* get back requested output */
  if (numVertices) *numVertices = m;
  if (offsets) {
    /* Shift offsets by the row index to drop the one diagonal (self) entry per row */
    ierr = PetscCalloc1(m+1, &idxs);CHKERRQ(ierr);
    for (i = 1; i < m+1; i++) idxs[i] = ii[i] - i; /* ParMetis does not like self-connectivity */
    *offsets = idxs;
  }
  if (adjacency) {
    ierr = PetscMalloc1(ii[m] - m, &idxs);CHKERRQ(ierr);
    ierr = ISGetIndices(cis_own, &rows);CHKERRQ(ierr);
    for (i = 0, c = 0; i < m; i++) {
      PetscInt j, g = rows[i];

      for (j = ii[i]; j < ii[i+1]; j++) {
        if (jj[j] == g) continue; /* again, self-connectivity */
        idxs[c++] = jj[j];
      }
    }
    /* Sanity check: exactly one self entry per row must have been skipped */
    if (c != ii[m] - m) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected %D != %D",c,ii[m]-m);
    ierr = ISRestoreIndices(cis_own, &rows);CHKERRQ(ierr);
    *adjacency = idxs;
  }

  /* cleanup */
  ierr = ISDestroy(&cis_own);CHKERRQ(ierr);
  ierr = MatRestoreRowIJ(conn, 0, PETSC_FALSE, PETSC_FALSE, &m, &ii, &jj, &flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "No IJ format");
  ierr = MatDestroy(&conn);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
413 
414 /*@C
415   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
416 
417   Input Parameters:
+ dm      - The mesh DM
419 - height  - Height of the strata from which to construct the graph
420 
421   Output Parameter:
422 + numVertices     - Number of vertices in the graph
423 . offsets         - Point offsets in the graph
424 . adjacency       - Point connectivity in the graph
425 - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.
426 
  Notes:
  The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
  representation on the mesh. If requested, globalNumbering needs to be destroyed by the caller; offsets and adjacency need to be freed with PetscFree().
429 
430   Level: developer
431 
432 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMSetAdjacency()
433 @*/
434 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
435 {
436   PetscErrorCode ierr;
437   PetscBool      usemat = PETSC_FALSE;
438 
439   PetscFunctionBegin;
440   ierr = PetscOptionsGetBool(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-dm_plex_csr_via_mat", &usemat, NULL);CHKERRQ(ierr);
441   if (usemat) {
442     ierr = DMPlexCreatePartitionerGraph_ViaMat(dm, height, numVertices, offsets, adjacency, globalNumbering);CHKERRQ(ierr);
443   } else {
444     ierr = DMPlexCreatePartitionerGraph_Native(dm, height, numVertices, offsets, adjacency, globalNumbering);CHKERRQ(ierr);
445   }
446   PetscFunctionReturn(0);
447 }
448 
449 /*@C
450   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
451 
452   Collective on DM
453 
  Input Parameters:
455 + dm - The DMPlex
456 - cellHeight - The height of mesh points to treat as cells (default should be 0)
457 
  Output Parameters:
459 + numVertices - The number of local vertices in the graph, or cells in the mesh.
460 . offsets     - The offset to the adjacency list for each cell
461 - adjacency   - The adjacency list for all cells
462 
463   Note: This is suitable for input to a mesh partitioner like ParMetis.
464 
465   Level: advanced
466 
467 .seealso: DMPlexCreate()
468 @*/
469 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
470 {
471   const PetscInt maxFaceCases = 30;
472   PetscInt       numFaceCases = 0;
473   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
474   PetscInt      *off, *adj;
475   PetscInt      *neighborCells = NULL;
476   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
477   PetscErrorCode ierr;
478 
479   PetscFunctionBegin;
480   /* For parallel partitioning, I think you have to communicate supports */
481   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
482   cellDim = dim - cellHeight;
483   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
484   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
485   if (cEnd - cStart == 0) {
486     if (numVertices) *numVertices = 0;
487     if (offsets)   *offsets   = NULL;
488     if (adjacency) *adjacency = NULL;
489     PetscFunctionReturn(0);
490   }
491   numCells  = cEnd - cStart;
492   faceDepth = depth - cellHeight;
493   if (dim == depth) {
494     PetscInt f, fStart, fEnd;
495 
496     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
497     /* Count neighboring cells */
498     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
499     for (f = fStart; f < fEnd; ++f) {
500       const PetscInt *support;
501       PetscInt        supportSize;
502       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
503       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
504       if (supportSize == 2) {
505         PetscInt numChildren;
506 
507         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
508         if (!numChildren) {
509           ++off[support[0]-cStart+1];
510           ++off[support[1]-cStart+1];
511         }
512       }
513     }
514     /* Prefix sum */
515     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
516     if (adjacency) {
517       PetscInt *tmp;
518 
519       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
520       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
521       ierr = PetscArraycpy(tmp, off, numCells+1);CHKERRQ(ierr);
522       /* Get neighboring cells */
523       for (f = fStart; f < fEnd; ++f) {
524         const PetscInt *support;
525         PetscInt        supportSize;
526         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
527         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
528         if (supportSize == 2) {
529           PetscInt numChildren;
530 
531           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
532           if (!numChildren) {
533             adj[tmp[support[0]-cStart]++] = support[1];
534             adj[tmp[support[1]-cStart]++] = support[0];
535           }
536         }
537       }
538       if (PetscDefined(USE_DEBUG)) {
539         for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
540       }
541       ierr = PetscFree(tmp);CHKERRQ(ierr);
542     }
543     if (numVertices) *numVertices = numCells;
544     if (offsets)   *offsets   = off;
545     if (adjacency) *adjacency = adj;
546     PetscFunctionReturn(0);
547   }
548   /* Setup face recognition */
549   if (faceDepth == 1) {
550     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
551 
552     for (c = cStart; c < cEnd; ++c) {
553       PetscInt corners;
554 
555       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
556       if (!cornersSeen[corners]) {
557         PetscInt nFV;
558 
559         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
560         cornersSeen[corners] = 1;
561 
562         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
563 
564         numFaceVertices[numFaceCases++] = nFV;
565       }
566     }
567   }
568   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
569   /* Count neighboring cells */
570   for (cell = cStart; cell < cEnd; ++cell) {
571     PetscInt numNeighbors = PETSC_DETERMINE, n;
572 
573     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
574     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
575     for (n = 0; n < numNeighbors; ++n) {
576       PetscInt        cellPair[2];
577       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
578       PetscInt        meetSize = 0;
579       const PetscInt *meet    = NULL;
580 
581       cellPair[0] = cell; cellPair[1] = neighborCells[n];
582       if (cellPair[0] == cellPair[1]) continue;
583       if (!found) {
584         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
585         if (meetSize) {
586           PetscInt f;
587 
588           for (f = 0; f < numFaceCases; ++f) {
589             if (numFaceVertices[f] == meetSize) {
590               found = PETSC_TRUE;
591               break;
592             }
593           }
594         }
595         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
596       }
597       if (found) ++off[cell-cStart+1];
598     }
599   }
600   /* Prefix sum */
601   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
602 
603   if (adjacency) {
604     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
605     /* Get neighboring cells */
606     for (cell = cStart; cell < cEnd; ++cell) {
607       PetscInt numNeighbors = PETSC_DETERMINE, n;
608       PetscInt cellOffset   = 0;
609 
610       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
611       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
612       for (n = 0; n < numNeighbors; ++n) {
613         PetscInt        cellPair[2];
614         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
615         PetscInt        meetSize = 0;
616         const PetscInt *meet    = NULL;
617 
618         cellPair[0] = cell; cellPair[1] = neighborCells[n];
619         if (cellPair[0] == cellPair[1]) continue;
620         if (!found) {
621           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
622           if (meetSize) {
623             PetscInt f;
624 
625             for (f = 0; f < numFaceCases; ++f) {
626               if (numFaceVertices[f] == meetSize) {
627                 found = PETSC_TRUE;
628                 break;
629               }
630             }
631           }
632           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
633         }
634         if (found) {
635           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
636           ++cellOffset;
637         }
638       }
639     }
640   }
641   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
642   if (numVertices) *numVertices = numCells;
643   if (offsets)   *offsets   = off;
644   if (adjacency) *adjacency = adj;
645   PetscFunctionReturn(0);
646 }
647 
648 /*@C
649   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
650 
651   Not Collective
652 
653   Input Parameters:
654 + name        - The name of a new user-defined creation routine
655 - create_func - The creation routine itself
656 
657   Notes:
658   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
659 
660   Sample usage:
661 .vb
662     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
663 .ve
664 
665   Then, your PetscPartitioner type can be chosen with the procedural interface via
666 .vb
667     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
668     PetscPartitionerSetType(PetscPartitioner, "my_part");
669 .ve
670    or at runtime via the option
671 .vb
672     -petscpartitioner_type my_part
673 .ve
674 
675   Level: advanced
676 
677 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
678 
679 @*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Append the constructor to the global registry consulted by PetscPartitionerSetType() */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
688 
/*@C
  PetscPartitionerSetType - Builds a particular PetscPartitioner

  Collective on PetscPartitioner

  Input Parameters:
+ part - The PetscPartitioner object
- name - The kind of partitioner

  Options Database Key:
. -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types

  Level: intermediate

.seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
{
  PetscErrorCode (*r)(PetscPartitioner);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0); /* already of the requested type: nothing to do */

  /* Look up the registered constructor for the requested type name */
  ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);

  /* Tear down the previous implementation before installing the new one */
  if (part->ops->destroy) {
    ierr = (*part->ops->destroy)(part);CHKERRQ(ierr);
  }
  part->noGraph = PETSC_FALSE;
  ierr = PetscMemzero(part->ops, sizeof(*part->ops));CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
  ierr = (*r)(part);CHKERRQ(ierr); /* run the type-specific constructor */
  PetscFunctionReturn(0);
}
728 
/*@C
  PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.

  Not Collective

  Input Parameter:
. part - The PetscPartitioner

  Output Parameter:
. name - The PetscPartitioner type name

  Level: intermediate

.seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  /* Return the type name stored on the base object by PetscObjectChangeTypeName() */
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
752 
/*@C
   PetscPartitionerViewFromOptions - View from Options

   Collective on PetscPartitioner

   Input Parameters:
+  A - the PetscPartitioner object
.  obj - Optional object
-  name - command line option

   Level: intermediate

.seealso:  PetscPartitionerView(), PetscObjectViewFromOptions()
@*/
PetscErrorCode PetscPartitionerViewFromOptions(PetscPartitioner A,PetscObject obj,const char name[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,PETSCPARTITIONER_CLASSID,1);
  /* Delegate to the generic PetscObject options-driven viewer */
  ierr = PetscObjectViewFromOptions((PetscObject)A,obj,name);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
775 
/*@C
  PetscPartitionerView - Views a PetscPartitioner

  Collective on PetscPartitioner

  Input Parameters:
+ part - the PetscPartitioner object to view
- v    - the viewer

  Level: developer

.seealso: PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
{
  PetscMPIInt    size;
  PetscBool      isascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Default to the standard-out ASCII viewer on the partitioner's communicator */
  if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
  ierr = PetscObjectTypeCompare((PetscObject) v, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
  if (isascii) {
    ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v, "Graph Partitioner: %d MPI Process%s\n", size, size > 1 ? "es" : "");CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v, "  type: %s\n", part->hdr.type_name);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v, "  edge cut: %D\n", part->edgeCut);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v, "  balance: %.2g\n", part->balance);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v, "  use vertex weights: %d\n", part->usevwgt);CHKERRQ(ierr);
  }
  /* Let the type-specific implementation append its own output */
  if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
810 
811 static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType)
812 {
813   PetscFunctionBegin;
814   if (!currentType) {
815 #if defined(PETSC_HAVE_PARMETIS)
816     *defaultType = PETSCPARTITIONERPARMETIS;
817 #elif defined(PETSC_HAVE_PTSCOTCH)
818     *defaultType = PETSCPARTITIONERPTSCOTCH;
819 #elif defined(PETSC_HAVE_CHACO)
820     *defaultType = PETSCPARTITIONERCHACO;
821 #else
822     *defaultType = PETSCPARTITIONERSIMPLE;
823 #endif
824   } else {
825     *defaultType = currentType;
826   }
827   PetscFunctionReturn(0);
828 }
829 
/*@
  PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to set options for

  Options Database Keys:
+  -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
.  -petscpartitioner_use_vertex_weights - Uses weights associated with the graph vertices
-  -petscpartitioner_view_graph - View the graph each time PetscPartitionerPartition is called. Viewer can be customized, see PetscOptionsGetViewer()

  Level: developer

.seealso: PetscPartitionerView(), PetscPartitionerSetType(), PetscPartitionerPartition()
@*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  const char    *defaultType;
  char           name[256];
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr = PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);CHKERRQ(ierr);
  /* PetscObjectOptionsBegin introduces PetscOptionsObject used below */
  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, sizeof(name), &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
  } else if (!((PetscObject) part)->type_name) {
    /* No type given on the command line and none set yet: install the default */
    ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
  }
  ierr = PetscOptionsBool("-petscpartitioner_use_vertex_weights","Use vertex weights","",part->usevwgt,&part->usevwgt,NULL);CHKERRQ(ierr);
  if (part->ops->setfromoptions) {
    ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);
  }
  /* Destroy any viewers from a previous call before reacquiring them from the options */
  ierr = PetscViewerDestroy(&part->viewer);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&part->viewerGraph);CHKERRQ(ierr);
  ierr = PetscOptionsGetViewer(((PetscObject) part)->comm, ((PetscObject) part)->options, ((PetscObject) part)->prefix, "-petscpartitioner_view", &part->viewer, NULL, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsGetViewer(((PetscObject) part)->comm, ((PetscObject) part)->options, ((PetscObject) part)->prefix, "-petscpartitioner_view_graph", &part->viewerGraph, NULL, &part->viewGraph);CHKERRQ(ierr);
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
877 
/*@C
  PetscPartitionerSetUp - Construct data structures for the PetscPartitioner

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to setup

  Level: developer

.seealso: PetscPartitionerView(), PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Setup is optional for implementations; only call it if provided */
  if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
899 
900 /*@
901   PetscPartitionerDestroy - Destroys a PetscPartitioner object
902 
903   Collective on PetscPartitioner
904 
905   Input Parameter:
906 . part - the PetscPartitioner object to destroy
907 
908   Level: developer
909 
910 .seealso: PetscPartitionerView()
911 @*/
912 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
913 {
914   PetscErrorCode ierr;
915 
916   PetscFunctionBegin;
917   if (!*part) PetscFunctionReturn(0);
918   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
919 
920   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
921   ((PetscObject) (*part))->refct = 0;
922 
923   ierr = PetscViewerDestroy(&(*part)->viewer);CHKERRQ(ierr);
924   ierr = PetscViewerDestroy(&(*part)->viewerGraph);CHKERRQ(ierr);
925   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
926   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
927   PetscFunctionReturn(0);
928 }
929 
/*@
  PetscPartitionerPartition - Partition a graph

  Collective on PetscPartitioner

  Input Parameters:
+ part    - The PetscPartitioner
. nparts  - Number of partitions
. numVertices - Number of vertices in the local part of the graph
. start - row pointers for the local part of the graph (CSR style)
. adjacency - adjacency list (CSR style)
. vertexSection - PetscSection describing the absolute weight of each local vertex (can be NULL)
- targetSection - PetscSection describing the absolute weight of each partition (can be NULL)

  Output Parameters:
+ partSection     - The PetscSection giving the division of points by partition
- partition       - The list of points by partition

  Options Database:
+ -petscpartitioner_view - View the partitioner information
- -petscpartitioner_view_graph - View the graph we are partitioning

  Notes:
    The chart of the vertexSection (if present) must contain [0,numVertices), with the number of dofs in the section specifying the absolute weight for each vertex.
    The chart of the targetSection (if present) must contain [0,nparts), with the number of dofs in the section specifying the absolute weight for each partition. This information must be the same across processes, PETSc does not check it.

  Level: developer

.seealso PetscPartitionerCreate(), PetscSectionCreate(), PetscSectionSetChart(), PetscSectionSetDof()
@*/
PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertexSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidLogicalCollectiveInt(part, nparts, 2);
  if (nparts <= 0) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Number of parts must be positive");
  if (numVertices < 0) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of vertices must be non-negative");
  /* The CSR arrays are only required when the implementation consumes the graph */
  if (numVertices && !part->noGraph) {
    PetscValidIntPointer(start, 4);
    PetscValidIntPointer(start + numVertices, 4);
    if (start[numVertices]) PetscValidIntPointer(adjacency, 5);
  }
  /* Validate the optional weight sections against their expected charts */
  if (vertexSection) {
    PetscInt s,e;

    PetscValidHeaderSpecific(vertexSection, PETSC_SECTION_CLASSID, 6);
    ierr = PetscSectionGetChart(vertexSection, &s, &e);CHKERRQ(ierr);
    if (s > 0 || e < numVertices) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid vertexSection chart [%D,%D)",s,e);
  }
  if (targetSection) {
    PetscInt s,e;

    PetscValidHeaderSpecific(targetSection, PETSC_SECTION_CLASSID, 7);
    ierr = PetscSectionGetChart(targetSection, &s, &e);CHKERRQ(ierr);
    if (s > 0 || e < nparts) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid targetSection chart [%D,%D)",s,e);
  }
  PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 8);
  PetscValidPointer(partition, 9);

  ierr = PetscSectionReset(partSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  if (nparts == 1) { /* quick */
    ierr = PetscSectionSetDof(partSection, 0, numVertices);CHKERRQ(ierr);
    ierr = ISCreateStride(PetscObjectComm((PetscObject)part),numVertices,0,1,partition);CHKERRQ(ierr);
  } else {
    if (!part->ops->partition) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "PetscPartitioner %s has no partitioning method", ((PetscObject)part)->type_name);
    ierr = (*part->ops->partition)(part, nparts, numVertices, start, adjacency, vertexSection, targetSection, partSection, partition);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* Optionally print the CSR graph, one rank at a time (set via -petscpartitioner_view_graph) */
  if (part->viewerGraph) {
    PetscViewer viewer = part->viewerGraph;
    PetscBool   isascii;
    PetscInt    v, i;
    PetscMPIInt rank;

    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) viewer), &rank);CHKERRQ(ierr);
    ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
    if (isascii) {
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]Nv: %D\n", rank, numVertices);CHKERRQ(ierr);
      for (v = 0; v < numVertices; ++v) {
        const PetscInt s = start[v];
        const PetscInt e = start[v+1];

        ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]  ", rank);CHKERRQ(ierr);
        for (i = s; i < e; ++i) {ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%D ", adjacency[i]);CHKERRQ(ierr);}
        ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%D-%D)\n", s, e);CHKERRQ(ierr);
      }
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    }
  }
  if (part->viewer) {
    ierr = PetscPartitionerView(part,part->viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1029 
/*@
  PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().

  Collective

  Input Parameter:
. comm - The communicator for the PetscPartitioner object

  Output Parameter:
. part - The PetscPartitioner object

  Level: beginner

.seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
@*/
PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
{
  PetscPartitioner p;
  const char       *partitionerType = NULL;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidPointer(part, 2);
  *part = NULL;
  /* Make sure the DM package (and the partitioner class id) is registered */
  ierr = DMInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
  /* Install the compile-time default type immediately so the object is usable */
  ierr = PetscPartitionerGetDefaultType(NULL,&partitionerType);CHKERRQ(ierr);
  ierr = PetscPartitionerSetType(p,partitionerType);CHKERRQ(ierr);

  p->edgeCut = 0;
  p->balance = 0.0;
  p->usevwgt = PETSC_TRUE;

  *part = p;
  PetscFunctionReturn(0);
}
1067 
/*@
  PetscPartitionerDMPlexPartition - Create a non-overlapping partition of the cells in the mesh

  Collective on PetscPartitioner

  Input Parameters:
+ part    - The PetscPartitioner
. dm      - The mesh DM
- targetSection - The PetscSection describing the absolute weight of each partition (can be NULL)

  Output Parameters:
+ partSection     - The PetscSection giving the division of points by partition
- partition       - The list of points by partition

  Notes:
    If the DM has a local section associated, each point to be partitioned will be weighted by the total number of dofs identified
    by the section in the transitive closure of the point.

  Level: developer

.seealso DMPlexDistribute(), PetscPartitionerCreate(), PetscSectionCreate(), PetscSectionSetChart(), PetscPartitionerPartition()
@*/
PetscErrorCode PetscPartitionerDMPlexPartition(PetscPartitioner part, DM dm, PetscSection targetSection, PetscSection partSection, IS *partition)
{
  PetscMPIInt    size;
  PetscBool      isplex;
  PetscErrorCode ierr;
  PetscSection   vertSection = NULL;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
  if (targetSection) PetscValidHeaderSpecific(targetSection, PETSC_SECTION_CLASSID, 3);
  PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
  PetscValidPointer(partition, 5);
  ierr = PetscObjectTypeCompare((PetscObject)dm,DMPLEX,&isplex);CHKERRQ(ierr);
  if (!isplex) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"Not for type %s",((PetscObject)dm)->type_name);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
  /* Serial shortcut: the single process owns every point at the target height */
  if (size == 1) {
    PetscInt *points;
    PetscInt  cStart, cEnd, c;

    ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
    ierr = PetscSectionReset(partSection);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
    ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
    for (c = cStart; c < cEnd; ++c) points[c] = c;
    ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  if (part->height == 0) {
    PetscInt numVertices = 0;
    PetscInt *start     = NULL;
    PetscInt *adjacency = NULL;
    IS       globalNumbering;

    /* Build the CSR dual graph only if the implementation (or a graph viewer) needs it */
    if (!part->noGraph || part->viewGraph) {
      ierr = DMPlexCreatePartitionerGraph(dm, part->height, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
    } else { /* only compute the number of owned local vertices */
      const PetscInt *idxs;
      PetscInt       p, pStart, pEnd;

      ierr = DMPlexGetHeightStratum(dm, part->height, &pStart, &pEnd);CHKERRQ(ierr);
      ierr = DMPlexCreateNumbering_Plex(dm, pStart, pEnd, 0, NULL, dm->sf, &globalNumbering);CHKERRQ(ierr);
      ierr = ISGetIndices(globalNumbering, &idxs);CHKERRQ(ierr);
      /* negative global indices mark points owned by another process */
      for (p = 0; p < pEnd - pStart; p++) numVertices += idxs[p] < 0 ? 0 : 1;
      ierr = ISRestoreIndices(globalNumbering, &idxs);CHKERRQ(ierr);
    }
    if (part->usevwgt) {
      PetscSection   section = dm->localSection, clSection = NULL;
      IS             clPoints = NULL;
      const PetscInt *gid,*clIdx;
      PetscInt       v, p, pStart, pEnd;

      /* dm->localSection encodes degrees of freedom per point, not per cell. We need to get the closure index to properly specify cell weights (aka dofs) */
      /* We do this only if the local section has been set */
      if (section) {
        ierr = PetscSectionGetClosureIndex(section, (PetscObject)dm, &clSection, NULL);CHKERRQ(ierr);
        if (!clSection) {
          ierr = DMPlexCreateClosureIndex(dm,NULL);CHKERRQ(ierr);
        }
        ierr = PetscSectionGetClosureIndex(section, (PetscObject)dm, &clSection, &clPoints);CHKERRQ(ierr);
        ierr = ISGetIndices(clPoints,&clIdx);CHKERRQ(ierr);
      }
      ierr = DMPlexGetHeightStratum(dm, part->height, &pStart, &pEnd);CHKERRQ(ierr);
      ierr = PetscSectionCreate(PETSC_COMM_SELF, &vertSection);CHKERRQ(ierr);
      ierr = PetscSectionSetChart(vertSection, 0, numVertices);CHKERRQ(ierr);
      if (globalNumbering) {
        ierr = ISGetIndices(globalNumbering,&gid);CHKERRQ(ierr);
      } else gid = NULL;
      for (p = pStart, v = 0; p < pEnd; ++p) {
        PetscInt dof = 1;

        /* skip cells in the overlap */
        if (gid && gid[p-pStart] < 0) continue;

        if (section) {
          PetscInt cl, clSize, clOff;

          /* weight = total dofs over the closure of the point */
          dof  = 0;
          ierr = PetscSectionGetDof(clSection, p, &clSize);CHKERRQ(ierr);
          ierr = PetscSectionGetOffset(clSection, p, &clOff);CHKERRQ(ierr);
          for (cl = 0; cl < clSize; cl+=2) {
            PetscInt clDof, clPoint = clIdx[clOff + cl]; /* odd indices are reserved for orientations */

            ierr = PetscSectionGetDof(section, clPoint, &clDof);CHKERRQ(ierr);
            dof += clDof;
          }
        }
        if (!dof) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of dofs for point %D in the local section should be positive",p);
        ierr = PetscSectionSetDof(vertSection, v, dof);CHKERRQ(ierr);
        v++;
      }
      if (globalNumbering) {
        ierr = ISRestoreIndices(globalNumbering,&gid);CHKERRQ(ierr);
      }
      if (clPoints) {
        ierr = ISRestoreIndices(clPoints,&clIdx);CHKERRQ(ierr);
      }
      ierr = PetscSectionSetUp(vertSection);CHKERRQ(ierr);
    }
    ierr = PetscPartitionerPartition(part, size, numVertices, start, adjacency, vertSection, targetSection, partSection, partition);CHKERRQ(ierr);
    ierr = PetscFree(start);CHKERRQ(ierr);
    ierr = PetscFree(adjacency);CHKERRQ(ierr);
    if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
      const PetscInt *globalNum;
      const PetscInt *partIdx;
      PetscInt       *map, cStart, cEnd;
      PetscInt       *adjusted, i, localSize, offset;
      IS             newPartition;

      ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
      ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
      ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
      ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
      ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
      ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
      /* map[k] = local point index of the k-th locally owned point */
      for (i = cStart, offset = 0; i < cEnd; i++) {
        if (globalNum[i - cStart] >= 0) map[offset++] = i;
      }
      for (i = 0; i < localSize; i++) {
        adjusted[i] = map[partIdx[i]];
      }
      ierr = PetscFree(map);CHKERRQ(ierr);
      ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
      ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
      ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
      ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
      ierr = ISDestroy(partition);CHKERRQ(ierr);
      *partition = newPartition;
    }
  } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
  ierr = PetscSectionDestroy(&vertSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1225 
/* Free the shell partitioner's type-specific data: stored section, partition IS, and context */
static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1237 
1238 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
1239 {
1240   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1241   PetscErrorCode          ierr;
1242 
1243   PetscFunctionBegin;
1244   if (p->random) {
1245     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1246     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
1247     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1248   }
1249   PetscFunctionReturn(0);
1250 }
1251 
/* Type-specific view entry point for the shell partitioner; only ASCII viewers produce output */
static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
  if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
1264 
/* Options processing for the shell partitioner: only the random-partition flag */
static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner Shell Options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1276 
1277 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
1278 {
1279   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1280   PetscInt                np;
1281   PetscErrorCode          ierr;
1282 
1283   PetscFunctionBegin;
1284   if (p->random) {
1285     PetscRandom r;
1286     PetscInt   *sizes, *points, v, p;
1287     PetscMPIInt rank;
1288 
1289     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) part), &rank);CHKERRQ(ierr);
1290     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
1291     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr);
1292     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
1293     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
1294     for (v = 0; v < numVertices; ++v) {points[v] = v;}
1295     for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);}
1296     for (v = numVertices-1; v > 0; --v) {
1297       PetscReal val;
1298       PetscInt  w, tmp;
1299 
1300       ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));CHKERRQ(ierr);
1301       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
1302       w    = PetscFloorReal(val);
1303       tmp       = points[v];
1304       points[v] = points[w];
1305       points[w] = tmp;
1306     }
1307     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
1308     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
1309     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
1310   }
1311   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
1312   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
1313   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
1314   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
1315   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
1316   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
1317   *partition = p->partition;
1318   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
1319   PetscFunctionReturn(0);
1320 }
1321 
/* Install the shell partitioner's function table on the base object */
static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->noGraph             = PETSC_TRUE; /* PetscPartitionerShell cannot overload the partition call, so it is safe for now */
  part->ops->view           = PetscPartitionerView_Shell;
  part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
  part->ops->destroy        = PetscPartitionerDestroy_Shell;
  part->ops->partition      = PetscPartitionerPartition_Shell;
  PetscFunctionReturn(0);
}
1332 
1333 /*MC
1334   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
1335 
1336   Level: intermediate
1337 
1338   Options Database Keys:
1339 .  -petscpartitioner_shell_random - Use a random partition
1340 
1341 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1342 M*/
1343 
/* Constructor for PETSCPARTITIONERSHELL: allocate the type-specific context and install ops */
PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
{
  PetscPartitioner_Shell *p;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
  part->data = p;

  ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
  p->random = PETSC_FALSE;
  PetscFunctionReturn(0);
}
1358 
/*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh

  Collective on PetscPartitioner

  Input Parameters:
+ part   - The PetscPartitioner
. size   - The number of partitions
. sizes  - array of length size (or NULL) providing the number of points in each partition
- points - array of length sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)

  Level: developer

  Notes:
    It is safe to free the sizes and points arrays after use in this routine.

.seealso DMPlexDistribute(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscInt                proc, numPoints;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  if (sizes)  {PetscValidPointer(sizes, 3);}
  if (points) {PetscValidPointer(points, 4);}
  /* Replace any previously stored partition */
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
  if (sizes) {
    for (proc = 0; proc < size; ++proc) {
      ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
    }
  }
  ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
  /* Storage size of the section is sum(sizes); copy the points so the caller may free them */
  ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1401 
/*@
  PetscPartitionerShellSetRandom - Set the flag to use a random partition

  Collective on PetscPartitioner

  Input Parameters:
+ part   - The PetscPartitioner
- random - The flag to use a random partition

  Level: intermediate

.seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  p->random = random;
  PetscFunctionReturn(0);
}
1424 
1425 /*@
  PetscPartitionerShellGetRandom - Get the flag to use a random partition
1427 
1428   Collective on PetscPartitioner
1429 
1430   Input Parameter:
1431 . part   - The PetscPartitioner
1432 
1433   Output Parameter:
1434 . random - The flag to use a random partition
1435 
1436   Level: intermediate
1437 
1438 .seealso PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
1439 @*/
1440 PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
1441 {
1442   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1443 
1444   PetscFunctionBegin;
1445   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1446   PetscValidPointer(random, 2);
1447   *random = p->random;
1448   PetscFunctionReturn(0);
1449 }
1450 
1451 static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
1452 {
1453   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
1454   PetscErrorCode          ierr;
1455 
1456   PetscFunctionBegin;
1457   ierr = PetscFree(p);CHKERRQ(ierr);
1458   PetscFunctionReturn(0);
1459 }
1460 
/* ASCII viewer for the simple partitioner: it has no state worth printing,
   so this is intentionally a no-op. */
static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1466 
1467 static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
1468 {
1469   PetscBool      iascii;
1470   PetscErrorCode ierr;
1471 
1472   PetscFunctionBegin;
1473   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1474   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1475   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1476   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
1477   PetscFunctionReturn(0);
1478 }
1479 
/* Simple partitioner: splits the global vertex numbering into contiguous
   chunks of (nearly) equal size, ignoring graph topology and vertex weights.
   If targetSection is given, its per-partition dofs are used as desired sizes
   (rescaled to sum to the global vertex count). */
static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np, *tpwgts = NULL, sumw = 0, numVerticesGlobal  = 0;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (vertSection) { ierr = PetscInfo(part,"PETSCPARTITIONERSIMPLE ignores vertex weights\n");CHKERRQ(ierr); }
  comm = PetscObjectComm((PetscObject)part);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (targetSection) {
    /* Rescale the per-partition targets so they sum to the global vertex count */
    ierr = MPIU_Allreduce(&numVertices, &numVerticesGlobal, 1, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
    ierr = PetscCalloc1(nparts,&tpwgts);CHKERRQ(ierr);
    for (np = 0; np < nparts; ++np) {
      ierr = PetscSectionGetDof(targetSection,np,&tpwgts[np]);CHKERRQ(ierr);
      sumw += tpwgts[np];
    }
    if (!sumw) {
      /* All targets are zero: discard them and fall back to uniform splitting */
      ierr = PetscFree(tpwgts);CHKERRQ(ierr);
    } else {
      PetscInt m,mp;
      for (np = 0; np < nparts; ++np) tpwgts[np] = (tpwgts[np]*numVerticesGlobal)/sumw;
      /* Integer truncation above may lose vertices; assign the remainder to
         the partition with the largest target */
      for (np = 0, m = -1, mp = 0, sumw = 0; np < nparts; ++np) {
        if (m < tpwgts[np]) { m = tpwgts[np]; mp = np; }
        sumw += tpwgts[np];
      }
      if (sumw != numVerticesGlobal) tpwgts[mp] += numVerticesGlobal - sumw;
    }
  }

  /* Local point order is unchanged: the partition IS is the identity */
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    if (tpwgts) {
      for (np = 0; np < nparts; ++np) {
        ierr = PetscSectionSetDof(partSection, np, tpwgts[np]);CHKERRQ(ierr);
      }
    } else {
      /* Uniform split: the first (numVertices % nparts) partitions get one extra point */
      for (np = 0; np < nparts; ++np) {
        ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);
      }
    }
  } else {
    if (tpwgts) {
      /* Parallel weighted case: build a global vector whose j-th entry holds
         the partition index owning global vertex j, then count local dofs */
      Vec         v;
      PetscScalar *array;
      PetscInt    st,j;
      PetscMPIInt rank;

      ierr = VecCreate(comm,&v);CHKERRQ(ierr);
      ierr = VecSetSizes(v,numVertices,numVerticesGlobal);CHKERRQ(ierr);
      ierr = VecSetType(v,VECSTANDARD);CHKERRQ(ierr);
      ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
      for (np = 0,st = 0; np < nparts; ++np) {
        /* Rank np fills partition np; the last rank also covers the extra
           partitions when nparts > size */
        if (rank == np || (rank == size-1 && size < nparts && np >= size)) {
          for (j = 0; j < tpwgts[np]; j++) {
            ierr = VecSetValue(v,st+j,np,INSERT_VALUES);CHKERRQ(ierr);
          }
        }
        st += tpwgts[np];
      }
      ierr = VecAssemblyBegin(v);CHKERRQ(ierr);
      ierr = VecAssemblyEnd(v);CHKERRQ(ierr);
      ierr = VecGetArray(v,&array);CHKERRQ(ierr);
      for (j = 0; j < numVertices; ++j) {
        ierr = PetscSectionAddDof(partSection,PetscRealPart(array[j]),1);CHKERRQ(ierr);
      }
      ierr = VecRestoreArray(v,&array);CHKERRQ(ierr);
      ierr = VecDestroy(&v);CHKERRQ(ierr);
    } else {
      /* Parallel uniform case: compute analytically which partitions overlap
         this rank's contiguous range [myFirst, myLast] of global vertices */
      PetscMPIInt rank;
      PetscInt nvGlobal, *offsets, myFirst, myLast;

      ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
      offsets[0] = 0;
      ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
      for (np = 2; np <= size; np++) {
        offsets[np] += offsets[np-1];
      }
      nvGlobal = offsets[size];
      ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
      myFirst = offsets[rank];
      myLast  = offsets[rank + 1] - 1;
      ierr = PetscFree(offsets);CHKERRQ(ierr);
      if (numVertices) {
        PetscInt firstPart = 0, firstLargePart = 0;
        PetscInt lastPart = 0, lastLargePart = 0;
        PetscInt rem = nvGlobal % nparts;    /* number of "big" partitions */
        PetscInt pSmall = nvGlobal/nparts;   /* size of a small partition */
        PetscInt pBig = nvGlobal/nparts + 1; /* size of a big partition */

        /* The rem big partitions precede the small ones in the global
           numbering; locate the first and last partitions touching my range */
        if (rem) {
          firstLargePart = myFirst / pBig;
          lastLargePart  = myLast  / pBig;

          if (firstLargePart < rem) {
            firstPart = firstLargePart;
          } else {
            firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
          }
          if (lastLargePart < rem) {
            lastPart = lastLargePart;
          } else {
            lastPart = rem + (myLast - (rem * pBig)) / pSmall;
          }
        } else {
          firstPart = myFirst / (nvGlobal/nparts);
          lastPart  = myLast  / (nvGlobal/nparts);
        }

        for (np = firstPart; np <= lastPart; np++) {
          /* Clip each partition's global range to the local vertex range */
          PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
          PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

          PartStart = PetscMax(PartStart,myFirst);
          PartEnd   = PetscMin(PartEnd,myLast+1);
          ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
        }
      }
    }
  }
  ierr = PetscFree(tpwgts);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1604 
1605 static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1606 {
1607   PetscFunctionBegin;
1608   part->noGraph        = PETSC_TRUE;
1609   part->ops->view      = PetscPartitionerView_Simple;
1610   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1611   part->ops->partition = PetscPartitionerPartition_Simple;
1612   PetscFunctionReturn(0);
1613 }
1614 
1615 /*MC
1616   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1617 
1618   Level: intermediate
1619 
1620 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1621 M*/
1622 
1623 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1624 {
1625   PetscPartitioner_Simple *p;
1626   PetscErrorCode           ierr;
1627 
1628   PetscFunctionBegin;
1629   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1630   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1631   part->data = p;
1632 
1633   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1634   PetscFunctionReturn(0);
1635 }
1636 
1637 static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1638 {
1639   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1640   PetscErrorCode          ierr;
1641 
1642   PetscFunctionBegin;
1643   ierr = PetscFree(p);CHKERRQ(ierr);
1644   PetscFunctionReturn(0);
1645 }
1646 
/* ASCII viewer for the gather partitioner: it has no state worth printing,
   so this is intentionally a no-op. */
static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1652 
1653 static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1654 {
1655   PetscBool      iascii;
1656   PetscErrorCode ierr;
1657 
1658   PetscFunctionBegin;
1659   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1660   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1661   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1662   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1663   PetscFunctionReturn(0);
1664 }
1665 
1666 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
1667 {
1668   PetscInt       np;
1669   PetscErrorCode ierr;
1670 
1671   PetscFunctionBegin;
1672   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1673   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1674   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1675   PetscFunctionReturn(0);
1676 }
1677 
1678 static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1679 {
1680   PetscFunctionBegin;
1681   part->noGraph        = PETSC_TRUE;
1682   part->ops->view      = PetscPartitionerView_Gather;
1683   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1684   part->ops->partition = PetscPartitionerPartition_Gather;
1685   PetscFunctionReturn(0);
1686 }
1687 
1688 /*MC
1689   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1690 
1691   Level: intermediate
1692 
1693 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1694 M*/
1695 
1696 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1697 {
1698   PetscPartitioner_Gather *p;
1699   PetscErrorCode           ierr;
1700 
1701   PetscFunctionBegin;
1702   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1703   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1704   part->data = p;
1705 
1706   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1707   PetscFunctionReturn(0);
1708 }
1709 
1710 
1711 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1712 {
1713   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1714   PetscErrorCode          ierr;
1715 
1716   PetscFunctionBegin;
1717   ierr = PetscFree(p);CHKERRQ(ierr);
1718   PetscFunctionReturn(0);
1719 }
1720 
/* ASCII viewer for the Chaco partitioner: it has no state worth printing,
   so this is intentionally a no-op. */
static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1726 
1727 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1728 {
1729   PetscBool      iascii;
1730   PetscErrorCode ierr;
1731 
1732   PetscFunctionBegin;
1733   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1734   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1735   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1736   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1737   PetscFunctionReturn(0);
1738 }
1739 
#if defined(PETSC_HAVE_CHACO)
#if defined(PETSC_HAVE_UNISTD_H)
#include <unistd.h>
#endif
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
#include <chaco.h>
#else
/* Older versions of Chaco do not have an include file */
PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
                       float *ewgts, float *x, float *y, float *z, char *outassignname,
                       char *outfilename, short *assignment, int architecture, int ndims_tot,
                       int mesh_dims[3], double *goal, int global_method, int local_method,
                       int rqi_flag, int vmax, int ndims, double eigtol, long seed);
#endif
/* Chaco global flag: when nonzero, interface() frees the caller's graph arrays.
   PetscPartitionerPartition_Chaco sets it to 0 to keep ownership of the graph. */
extern int FREE_GRAPH;
#endif
1756 
/* Partition the (serial) graph with Chaco's multilevel algorithm.
   Chaco cannot handle a distributed graph: exactly one rank may hold vertices;
   the others return an empty IS.  Chaco's diagnostics go to stdout, which is
   temporarily redirected into a pipe so errors can be reported via SETERRQ. */
static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int           *assignment;              /* Output partition */
#else
  short int     *assignment;              /* Output partition */
#endif
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)part,&comm);CHKERRQ(ierr);
  /* In debug builds, verify that at most one rank actually holds vertices */
  if (PetscDefined (USE_DEBUG)) {
    int ival,isum;
    PetscBool distributed;

    ival = (numVertices > 0);
    ierr = MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);CHKERRQ(ierr);
    distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
    if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
  }
  if (!numVertices) { /* distributed case, return if not holding the graph */
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco expects 1-based vertex numbers; shift in place and undo at the end */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  if (part->usevwgt) { ierr = PetscInfo(part,"PETSCPARTITIONERCHACO ignores vertex weights\n");CHKERRQ(ierr); }
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    /* Drain Chaco's stdout from the pipe, then restore the real stdout */
    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#else
  if (ierr) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
#endif
  /* Convert to PetscSection+IS */
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  /* Group the vertex indices by partition number */
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Restore the caller's 0-based adjacency numbering */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1872 
1873 static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1874 {
1875   PetscFunctionBegin;
1876   part->noGraph        = PETSC_FALSE;
1877   part->ops->view      = PetscPartitionerView_Chaco;
1878   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1879   part->ops->partition = PetscPartitionerPartition_Chaco;
1880   PetscFunctionReturn(0);
1881 }
1882 
1883 /*MC
1884   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1885 
1886   Level: intermediate
1887 
1888 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1889 M*/
1890 
1891 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1892 {
1893   PetscPartitioner_Chaco *p;
1894   PetscErrorCode          ierr;
1895 
1896   PetscFunctionBegin;
1897   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1898   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1899   part->data = p;
1900 
1901   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1902   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1903   PetscFunctionReturn(0);
1904 }
1905 
1906 static const char *ptypes[] = {"kway", "rb"};
1907 
1908 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1909 {
1910   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1911   PetscErrorCode             ierr;
1912 
1913   PetscFunctionBegin;
1914   ierr = MPI_Comm_free(&p->pcomm);CHKERRQ(ierr);
1915   ierr = PetscFree(p);CHKERRQ(ierr);
1916   PetscFunctionReturn(0);
1917 }
1918 
1919 static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1920 {
1921   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1922   PetscErrorCode             ierr;
1923 
1924   PetscFunctionBegin;
1925   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1926   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis type: %s\n", ptypes[p->ptype]);CHKERRQ(ierr);
1927   ierr = PetscViewerASCIIPrintf(viewer, "load imbalance ratio %g\n", (double) p->imbalanceRatio);CHKERRQ(ierr);
1928   ierr = PetscViewerASCIIPrintf(viewer, "debug flag %D\n", p->debugFlag);CHKERRQ(ierr);
1929   ierr = PetscViewerASCIIPrintf(viewer, "random seed %D\n", p->randomSeed);CHKERRQ(ierr);
1930   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1931   PetscFunctionReturn(0);
1932 }
1933 
1934 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1935 {
1936   PetscBool      iascii;
1937   PetscErrorCode ierr;
1938 
1939   PetscFunctionBegin;
1940   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1941   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1942   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1943   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1944   PetscFunctionReturn(0);
1945 }
1946 
/* Query the options database for ParMETIS-specific settings; each option
   defaults to the value currently stored in the context. */
static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
  PetscErrorCode            ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");CHKERRQ(ierr);
  /* ptype is the index into the ptypes[] table ("kway" or "rb") */
  ierr = PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-petscpartitioner_parmetis_imbalance_ratio", "Load imbalance ratio limit", "", p->imbalanceRatio, &p->imbalanceRatio, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-petscpartitioner_parmetis_debug", "Debugging flag", "", p->debugFlag, &p->debugFlag, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-petscpartitioner_parmetis_seed", "Random seed", "", p->randomSeed, &p->randomSeed, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1961 
1962 #if defined(PETSC_HAVE_PARMETIS)
1963 #include <parmetis.h>
1964 #endif
1965 
/* Partition the distributed graph with (Par)METIS.
   When all vertices live on a single rank, serial METIS is called on that rank
   only; otherwise ParMETIS_V3_PartKway runs on the ranks that hold vertices
   (ParMETIS does not accept empty local graphs, so those ranks are split off). */
static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_PARMETIS)
  PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *) part->data;
  MPI_Comm       comm;
  PetscInt       nvtxs       = numVertices; /* The number of vertices in full graph */
  PetscInt      *vtxdist;                   /* Distribution of vertices across processes */
  PetscInt      *xadj        = start;       /* Start of edge list for each vertex */
  PetscInt      *adjncy      = adjacency;   /* Edge lists for all vertices */
  PetscInt      *vwgt        = NULL;        /* Vertex weights */
  PetscInt      *adjwgt      = NULL;        /* Edge weights */
  PetscInt       wgtflag     = 0;           /* Indicates which weights are present */
  PetscInt       numflag     = 0;           /* Indicates initial offset (0 or 1) */
  PetscInt       ncon        = 1;           /* The number of weights per vertex */
  PetscInt       metis_ptype = pm->ptype;   /* kway or recursive bisection */
  real_t        *tpwgts;                    /* The fraction of vertex weights assigned to each partition */
  real_t        *ubvec;                     /* The balance intolerance for vertex weights */
  PetscInt       options[64];               /* Options */
  PetscInt       v, i, *assignment, *points;
  PetscMPIInt    p, size, rank;
  PetscBool      hasempty = PETSC_FALSE;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  /* Calculate vertex distribution */
  ierr = PetscMalloc4(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment);CHKERRQ(ierr);
  vtxdist[0] = 0;
  ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
  /* Prefix-sum the counts; note along the way whether any rank is empty */
  for (p = 2; p <= size; ++p) {
    hasempty = (PetscBool)(hasempty || !vtxdist[p-1] || !vtxdist[p]);
    vtxdist[p] += vtxdist[p-1];
  }
  /* null graph */
  if (vtxdist[size] == 0) {
    ierr = PetscFree4(vtxdist,tpwgts,ubvec,assignment);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  /* Calculate partition weights */
  if (targetSection) {
    PetscInt p;
    real_t   sumt = 0.0;

    for (p = 0; p < nparts; ++p) {
      PetscInt tpd;

      ierr = PetscSectionGetDof(targetSection,p,&tpd);CHKERRQ(ierr);
      sumt += tpd;
      tpwgts[p] = tpd;
    }
    if (sumt) { /* METIS/ParMETIS do not like exactly zero weight */
      for (p = 0, sumt = 0.0; p < nparts; ++p) {
        tpwgts[p] = PetscMax(tpwgts[p],PETSC_SMALL);
        sumt += tpwgts[p];
      }
      for (p = 0; p < nparts; ++p) tpwgts[p] /= sumt;
      /* Force the fractions to sum to exactly 1 despite rounding */
      for (p = 0, sumt = 0.0; p < nparts-1; ++p) sumt += tpwgts[p];
      tpwgts[nparts - 1] = 1. - sumt;
    }
  } else {
    for (p = 0; p < nparts; ++p) tpwgts[p] = 1.0/nparts;
  }
  ubvec[0] = pm->imbalanceRatio;

  /* Weight cells */
  if (vertSection) {
    ierr = PetscMalloc1(nvtxs,&vwgt);CHKERRQ(ierr);
    for (v = 0; v < nvtxs; ++v) {
      ierr = PetscSectionGetDof(vertSection, v, &vwgt[v]);CHKERRQ(ierr);
    }
    wgtflag |= 2; /* have weights on graph vertices */
  }

  /* Find the first rank p with vertices; if it holds them all, run serial METIS there */
  for (p = 0; !vtxdist[p+1] && p < size; ++p);
  if (vtxdist[p+1] == vtxdist[size]) {
    if (rank == p) {
      ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
      options[METIS_OPTION_DBGLVL] = pm->debugFlag;
      options[METIS_OPTION_SEED]   = pm->randomSeed;
      if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
      if (metis_ptype == 1) {
        PetscStackPush("METIS_PartGraphRecursive");
        ierr = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
        PetscStackPop;
        if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
      } else {
        /*
         It would be nice to activate the two options below, but they would need some actual testing.
         - Turning on these options may exercise path of the METIS code that have bugs and may break production runs.
         - If CONTIG is set to 1, METIS will exit with error if the graph is disconnected, despite the manual saying the option is ignored in such case.
        */
        /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
        /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
        PetscStackPush("METIS_PartGraphKway");
        ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
        PetscStackPop;
        if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
      }
    }
  } else {
    MPI_Comm pcomm;

    options[0] = 1; /*use options */
    options[1] = pm->debugFlag;
    options[2] = (pm->randomSeed == -1) ? 15 : pm->randomSeed; /* default is GLOBAL_SEED=15 from `libparmetis/defs.h` */

    if (hasempty) { /* parmetis does not support empty graphs on some of the processes */
      PetscInt cnt;

      /* Split off the non-empty ranks and compact vtxdist to match */
      ierr = MPI_Comm_split(pm->pcomm,!!nvtxs,rank,&pcomm);CHKERRQ(ierr);
      for (p=0,cnt=0;p<size;p++) {
        if (vtxdist[p+1] != vtxdist[p]) {
          vtxdist[cnt+1] = vtxdist[p+1];
          cnt++;
        }
      }
    } else pcomm = pm->pcomm;
    if (nvtxs) {
      PetscStackPush("ParMETIS_V3_PartKway");
      ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &pcomm);
      PetscStackPop;
      if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
    }
    if (hasempty) {
      ierr = MPI_Comm_free(&pcomm);CHKERRQ(ierr);
    }
  }

  /* Convert to PetscSection+IS */
  for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  /* Group the local vertex indices by partition number */
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  ierr = PetscFree4(vtxdist,tpwgts,ubvec,assignment);CHKERRQ(ierr);
  ierr = PetscFree(vwgt);CHKERRQ(ierr);
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
#endif
}
2114 
2115 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
2116 {
2117   PetscFunctionBegin;
2118   part->noGraph             = PETSC_FALSE;
2119   part->ops->view           = PetscPartitionerView_ParMetis;
2120   part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
2121   part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
2122   part->ops->partition      = PetscPartitionerPartition_ParMetis;
2123   PetscFunctionReturn(0);
2124 }
2125 
2126 /*MC
2127   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMETIS library
2128 
2129   Level: intermediate
2130 
2131   Options Database Keys:
2132 +  -petscpartitioner_parmetis_type <string> - ParMETIS partitioning type. Either "kway" or "rb" (recursive bisection)
2133 .  -petscpartitioner_parmetis_imbalance_ratio <value> - Load imbalance ratio limit
2134 .  -petscpartitioner_parmetis_debug <int> - Debugging flag passed to ParMETIS/METIS routines
2135 -  -petscpartitioner_parmetis_seed <int> - Random seed
2136 
2137   Notes: when the graph is on a single process, this partitioner actually calls METIS and not ParMETIS
2138 
2139 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
2140 M*/
2141 
2142 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
2143 {
2144   PetscPartitioner_ParMetis *p;
2145   PetscErrorCode          ierr;
2146 
2147   PetscFunctionBegin;
2148   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
2149   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
2150   part->data = p;
2151 
2152   ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)part),&p->pcomm);CHKERRQ(ierr);
2153   p->ptype          = 0;
2154   p->imbalanceRatio = 1.05;
2155   p->debugFlag      = 0;
2156   p->randomSeed     = -1; /* defaults to GLOBAL_SEED=15 from `libparmetis/defs.h` */
2157 
2158   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
2159   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
2160   PetscFunctionReturn(0);
2161 }
2162 
2163 #if defined(PETSC_HAVE_PTSCOTCH)
2164 
2165 EXTERN_C_BEGIN
2166 #include <ptscotch.h>
2167 EXTERN_C_END
2168 
/* PT-Scotch routines return a nonzero int on failure; turn that into a PETSc library error */
#define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0)
2170 
2171 static int PTScotch_Strategy(PetscInt strategy)
2172 {
2173   switch (strategy) {
2174   case  0: return SCOTCH_STRATDEFAULT;
2175   case  1: return SCOTCH_STRATQUALITY;
2176   case  2: return SCOTCH_STRATSPEED;
2177   case  3: return SCOTCH_STRATBALANCE;
2178   case  4: return SCOTCH_STRATSAFETY;
2179   case  5: return SCOTCH_STRATSCALABILITY;
2180   case  6: return SCOTCH_STRATRECURSIVE;
2181   case  7: return SCOTCH_STRATREMAP;
2182   default: return SCOTCH_STRATDEFAULT;
2183   }
2184 }
2185 
/* Partition a graph resident on a single process with (sequential) Scotch.
   strategy/imbalance select and tune the Scotch strategy, (n, xadj, adjncy) is the
   CSR graph, vtxwgt/adjwgt are optional vertex/edge weights, tpart (if non-NULL)
   holds target partition weights, and the target part of each vertex is written
   into part[]. */
static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
                                             SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num tpart[], SCOTCH_Num part[])
{
  SCOTCH_Graph   grafdat;
  SCOTCH_Strat   stradat;
  SCOTCH_Num     vertnbr = n;
  SCOTCH_Num     edgenbr = xadj[n];  /* CSR convention: xadj[n] is the total edge count */
  SCOTCH_Num*    velotab = vtxwgt;
  SCOTCH_Num*    edlotab = adjwgt;
  SCOTCH_Num     flagval = strategy;
  double         kbalval = imbalance;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  {
    PetscBool flg = PETSC_TRUE;
    /* NOTE(review): the option name below misspells "partitioner" as "partititoner";
       it is kept as-is because it is the historical (deprecated) name users may have
       in scripts — confirm before renaming */
    ierr = PetscOptionsDeprecatedNoObject("-petscpartititoner_ptscotch_vertex_weight",NULL,"3.13","Use -petscpartitioner_use_vertex_weights");CHKERRQ(ierr);
    ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
    if (!flg) velotab = NULL; /* user opted out of vertex weights */
  }
  ierr = SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
  if (tpart) {
    /* Target partition weights given: map onto a weighted complete-graph architecture */
    SCOTCH_Arch archdat;
    ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
    ierr = SCOTCH_archCmpltw(&archdat, nparts, tpart);CHKERRPTSCOTCH(ierr);
    ierr = SCOTCH_graphMap(&grafdat, &archdat, &stradat, part);CHKERRPTSCOTCH(ierr);
    SCOTCH_archExit(&archdat);
  } else {
    ierr = SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
  }
  SCOTCH_stratExit(&stradat);
  SCOTCH_graphExit(&grafdat);
  PetscFunctionReturn(0);
}
2223 
2224 static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
2225                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num tpart[], SCOTCH_Num part[], MPI_Comm comm)
2226 {
2227   PetscMPIInt     procglbnbr;
2228   PetscMPIInt     proclocnum;
2229   SCOTCH_Arch     archdat;
2230   SCOTCH_Dgraph   grafdat;
2231   SCOTCH_Dmapping mappdat;
2232   SCOTCH_Strat    stradat;
2233   SCOTCH_Num      vertlocnbr;
2234   SCOTCH_Num      edgelocnbr;
2235   SCOTCH_Num*     veloloctab = vtxwgt;
2236   SCOTCH_Num*     edloloctab = adjwgt;
2237   SCOTCH_Num      flagval = strategy;
2238   double          kbalval = imbalance;
2239   PetscErrorCode  ierr;
2240 
2241   PetscFunctionBegin;
2242   {
2243     PetscBool flg = PETSC_TRUE;
2244     ierr = PetscOptionsDeprecatedNoObject("-petscpartititoner_ptscotch_vertex_weight",NULL,"3.13","Use -petscpartitioner_use_vertex_weights");CHKERRQ(ierr);
2245     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
2246     if (!flg) veloloctab = NULL;
2247   }
2248   ierr = MPI_Comm_size(comm, &procglbnbr);CHKERRQ(ierr);
2249   ierr = MPI_Comm_rank(comm, &proclocnum);CHKERRQ(ierr);
2250   vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
2251   edgelocnbr = xadj[vertlocnbr];
2252 
2253   ierr = SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr);
2254   ierr = SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr);
2255   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
2256   ierr = SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRQ(ierr);
2257   ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
2258   if (tpart) { /* target partition weights */
2259     ierr = SCOTCH_archCmpltw(&archdat, nparts, tpart);CHKERRPTSCOTCH(ierr);
2260   } else {
2261     ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
2262   }
2263   ierr = SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr);
2264 
2265   ierr = SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr);
2266   SCOTCH_dgraphMapExit(&grafdat, &mappdat);
2267   SCOTCH_archExit(&archdat);
2268   SCOTCH_stratExit(&stradat);
2269   SCOTCH_dgraphExit(&grafdat);
2270   PetscFunctionReturn(0);
2271 }
2272 
2273 #endif /* PETSC_HAVE_PTSCOTCH */
2274 
2275 static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
2276 {
2277   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
2278   PetscErrorCode             ierr;
2279 
2280   PetscFunctionBegin;
2281   ierr = MPI_Comm_free(&p->pcomm);CHKERRQ(ierr);
2282   ierr = PetscFree(p);CHKERRQ(ierr);
2283   PetscFunctionReturn(0);
2284 }
2285 
2286 static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer)
2287 {
2288   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
2289   PetscErrorCode            ierr;
2290 
2291   PetscFunctionBegin;
2292   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
2293   ierr = PetscViewerASCIIPrintf(viewer,"using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);CHKERRQ(ierr);
2294   ierr = PetscViewerASCIIPrintf(viewer,"using load imbalance ratio %g\n",(double)p->imbalance);CHKERRQ(ierr);
2295   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
2296   PetscFunctionReturn(0);
2297 }
2298 
2299 static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
2300 {
2301   PetscBool      iascii;
2302   PetscErrorCode ierr;
2303 
2304   PetscFunctionBegin;
2305   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
2306   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
2307   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
2308   if (iascii) {ierr = PetscPartitionerView_PTScotch_Ascii(part, viewer);CHKERRQ(ierr);}
2309   PetscFunctionReturn(0);
2310 }
2311 
2312 static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
2313 {
2314   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
2315   const char *const         *slist = PTScotchStrategyList;
2316   PetscInt                  nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0]));
2317   PetscBool                 flag;
2318   PetscErrorCode            ierr;
2319 
2320   PetscFunctionBegin;
2321   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");CHKERRQ(ierr);
2322   ierr = PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);CHKERRQ(ierr);
2323   ierr = PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);CHKERRQ(ierr);
2324   ierr = PetscOptionsTail();CHKERRQ(ierr);
2325   PetscFunctionReturn(0);
2326 }
2327 
2328 static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
2329 {
2330 #if defined(PETSC_HAVE_PTSCOTCH)
2331   MPI_Comm       comm;
2332   PetscInt       nvtxs = numVertices;   /* The number of vertices in full graph */
2333   PetscInt       *vtxdist;              /* Distribution of vertices across processes */
2334   PetscInt       *xadj   = start;       /* Start of edge list for each vertex */
2335   PetscInt       *adjncy = adjacency;   /* Edge lists for all vertices */
2336   PetscInt       *vwgt   = NULL;        /* Vertex weights */
2337   PetscInt       *adjwgt = NULL;        /* Edge weights */
2338   PetscInt       v, i, *assignment, *points;
2339   PetscMPIInt    size, rank, p;
2340   PetscBool      hasempty = PETSC_FALSE;
2341   PetscInt       *tpwgts = NULL;
2342   PetscErrorCode ierr;
2343 
2344   PetscFunctionBegin;
2345   ierr = PetscObjectGetComm((PetscObject)part,&comm);CHKERRQ(ierr);
2346   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2347   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2348   ierr = PetscMalloc2(size+1,&vtxdist,PetscMax(nvtxs,1),&assignment);CHKERRQ(ierr);
2349   /* Calculate vertex distribution */
2350   vtxdist[0] = 0;
2351   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
2352   for (p = 2; p <= size; ++p) {
2353     hasempty = (PetscBool)(hasempty || !vtxdist[p-1] || !vtxdist[p]);
2354     vtxdist[p] += vtxdist[p-1];
2355   }
2356   /* null graph */
2357   if (vtxdist[size] == 0) {
2358     ierr = PetscFree2(vtxdist, assignment);CHKERRQ(ierr);
2359     ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
2360     PetscFunctionReturn(0);
2361   }
2362 
2363   /* Calculate vertex weights */
2364   if (vertSection) {
2365     ierr = PetscMalloc1(nvtxs,&vwgt);CHKERRQ(ierr);
2366     for (v = 0; v < nvtxs; ++v) {
2367       ierr = PetscSectionGetDof(vertSection, v, &vwgt[v]);CHKERRQ(ierr);
2368     }
2369   }
2370 
2371   /* Calculate partition weights */
2372   if (targetSection) {
2373     PetscInt sumw;
2374 
2375     ierr = PetscCalloc1(nparts,&tpwgts);CHKERRQ(ierr);
2376     for (p = 0, sumw = 0; p < nparts; ++p) {
2377       ierr = PetscSectionGetDof(targetSection,p,&tpwgts[p]);CHKERRQ(ierr);
2378       sumw += tpwgts[p];
2379     }
2380     if (!sumw) {
2381       ierr = PetscFree(tpwgts);CHKERRQ(ierr);
2382     }
2383   }
2384 
2385   {
2386     PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
2387     int                       strat = PTScotch_Strategy(pts->strategy);
2388     double                    imbal = (double)pts->imbalance;
2389 
2390     for (p = 0; !vtxdist[p+1] && p < size; ++p);
2391     if (vtxdist[p+1] == vtxdist[size]) {
2392       if (rank == p) {
2393         ierr = PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, assignment);CHKERRQ(ierr);
2394       }
2395     } else {
2396       PetscInt cnt;
2397       MPI_Comm pcomm;
2398 
2399       if (hasempty) {
2400         ierr = MPI_Comm_split(pts->pcomm,!!nvtxs,rank,&pcomm);CHKERRQ(ierr);
2401         for (p=0,cnt=0;p<size;p++) {
2402           if (vtxdist[p+1] != vtxdist[p]) {
2403             vtxdist[cnt+1] = vtxdist[p+1];
2404             cnt++;
2405           }
2406         }
2407       } else pcomm = pts->pcomm;
2408       if (nvtxs) {
2409         ierr = PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, assignment, pcomm);CHKERRQ(ierr);
2410       }
2411       if (hasempty) {
2412         ierr = MPI_Comm_free(&pcomm);CHKERRQ(ierr);
2413       }
2414     }
2415   }
2416   ierr = PetscFree(vwgt);CHKERRQ(ierr);
2417   ierr = PetscFree(tpwgts);CHKERRQ(ierr);
2418 
2419   /* Convert to PetscSection+IS */
2420   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
2421   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
2422   for (p = 0, i = 0; p < nparts; ++p) {
2423     for (v = 0; v < nvtxs; ++v) {
2424       if (assignment[v] == p) points[i++] = v;
2425     }
2426   }
2427   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
2428   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
2429 
2430   ierr = PetscFree2(vtxdist,assignment);CHKERRQ(ierr);
2431   PetscFunctionReturn(0);
2432 #else
2433   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
2434 #endif
2435 }
2436 
2437 static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
2438 {
2439   PetscFunctionBegin;
2440   part->noGraph             = PETSC_FALSE;
2441   part->ops->view           = PetscPartitionerView_PTScotch;
2442   part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
2443   part->ops->partition      = PetscPartitionerPartition_PTScotch;
2444   part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
2445   PetscFunctionReturn(0);
2446 }
2447 
2448 /*MC
2449   PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library
2450 
2451   Level: intermediate
2452 
2453   Options Database Keys:
2454 +  -petscpartitioner_ptscotch_strategy <string> - PT-Scotch strategy. Choose one of default quality speed balance safety scalability recursive remap
2455 -  -petscpartitioner_ptscotch_imbalance <val> - Load imbalance ratio
2456 
2457   Notes: when the graph is on a single process, this partitioner actually uses Scotch and not PT-Scotch
2458 
2459 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
2460 M*/
2461 
2462 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
2463 {
2464   PetscPartitioner_PTScotch *p;
2465   PetscErrorCode          ierr;
2466 
2467   PetscFunctionBegin;
2468   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
2469   ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
2470   part->data = p;
2471 
2472   ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)part),&p->pcomm);CHKERRQ(ierr);
2473   p->strategy  = 0;
2474   p->imbalance = 0.01;
2475 
2476   ierr = PetscPartitionerInitialize_PTScotch(part);CHKERRQ(ierr);
2477   ierr = PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);CHKERRQ(ierr);
2478   PetscFunctionReturn(0);
2479 }
2480 
2481 /*@
2482   DMPlexGetPartitioner - Get the mesh partitioner
2483 
2484   Not collective
2485 
2486   Input Parameter:
2487 . dm - The DM
2488 
2489   Output Parameter:
2490 . part - The PetscPartitioner
2491 
2492   Level: developer
2493 
2494   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
2495 
.seealso: DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerDMPlexPartition(), PetscPartitionerCreate()
2497 @*/
2498 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
2499 {
2500   DM_Plex       *mesh = (DM_Plex *) dm->data;
2501 
2502   PetscFunctionBegin;
2503   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2504   PetscValidPointer(part, 2);
2505   *part = mesh->partitioner;
2506   PetscFunctionReturn(0);
2507 }
2508 
2509 /*@
2510   DMPlexSetPartitioner - Set the mesh partitioner
2511 
2512   logically collective on DM
2513 
2514   Input Parameters:
2515 + dm - The DM
2516 - part - The partitioner
2517 
2518   Level: developer
2519 
2520   Note: Any existing PetscPartitioner will be destroyed.
2521 
.seealso: DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
2523 @*/
PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
{
  DM_Plex       *mesh = (DM_Plex *) dm->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
  /* Take the new reference BEFORE destroying the old one, so the code is correct
     even when part == mesh->partitioner (self-assignment) */
  ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
  ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
  mesh->partitioner = part;
  PetscFunctionReturn(0);
}
2537 
2538 static PetscErrorCode DMPlexAddClosure_Private(DM dm, PetscHSetI ht, PetscInt point)
2539 {
2540   const PetscInt *cone;
2541   PetscInt       coneSize, c;
2542   PetscBool      missing;
2543   PetscErrorCode ierr;
2544 
2545   PetscFunctionBeginHot;
2546   ierr = PetscHSetIQueryAdd(ht, point, &missing);CHKERRQ(ierr);
2547   if (missing) {
2548     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
2549     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
2550     for (c = 0; c < coneSize; c++) {
2551       ierr = DMPlexAddClosure_Private(dm, ht, cone[c]);CHKERRQ(ierr);
2552     }
2553   }
2554   PetscFunctionReturn(0);
2555 }
2556 
/* Add the tree-closure of point to the hash set: ascend to the parent's transitive
   closure when up is true, and descend through children when down is true. Kept as
   an alternative implementation (currently unused; see DMPlexClosurePoints_Private). */
PETSC_UNUSED static PetscErrorCode DMPlexAddClosure_Tree(DM dm, PetscHSetI ht, PetscInt point, PetscBool up, PetscBool down)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (up) {
    PetscInt parent;

    ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
    /* A point that is its own parent is a tree root: nothing above it */
    if (parent != point) {
      PetscInt closureSize, *closure = NULL, i;

      ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
      for (i = 0; i < closureSize; i++) {
        PetscInt cpoint = closure[2*i]; /* closure stores (point, orientation) pairs */

        ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
        /* Recurse upward only, to avoid revisiting the downward direction */
        ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
      }
      ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
    }
  }
  if (down) {
    PetscInt numChildren;
    const PetscInt *children;

    ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
    if (numChildren) {
      PetscInt i;

      for (i = 0; i < numChildren; i++) {
        PetscInt cpoint = children[i];

        ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
        /* Recurse downward only */
        ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}
2597 
2598 static PetscErrorCode DMPlexAddClosureTree_Up_Private(DM dm, PetscHSetI ht, PetscInt point)
2599 {
2600   PetscInt       parent;
2601   PetscErrorCode ierr;
2602 
2603   PetscFunctionBeginHot;
2604   ierr = DMPlexGetTreeParent(dm, point, &parent,NULL);CHKERRQ(ierr);
2605   if (point != parent) {
2606     const PetscInt *cone;
2607     PetscInt       coneSize, c;
2608 
2609     ierr = DMPlexAddClosureTree_Up_Private(dm, ht, parent);CHKERRQ(ierr);
2610     ierr = DMPlexAddClosure_Private(dm, ht, parent);CHKERRQ(ierr);
2611     ierr = DMPlexGetCone(dm, parent, &cone);CHKERRQ(ierr);
2612     ierr = DMPlexGetConeSize(dm, parent, &coneSize);CHKERRQ(ierr);
2613     for (c = 0; c < coneSize; c++) {
2614       const PetscInt cp = cone[c];
2615 
2616       ierr = DMPlexAddClosureTree_Up_Private(dm, ht, cp);CHKERRQ(ierr);
2617     }
2618   }
2619   PetscFunctionReturn(0);
2620 }
2621 
2622 static PetscErrorCode DMPlexAddClosureTree_Down_Private(DM dm, PetscHSetI ht, PetscInt point)
2623 {
2624   PetscInt       i, numChildren;
2625   const PetscInt *children;
2626   PetscErrorCode ierr;
2627 
2628   PetscFunctionBeginHot;
2629   ierr = DMPlexGetTreeChildren(dm, point, &numChildren, &children);CHKERRQ(ierr);
2630   for (i = 0; i < numChildren; i++) {
2631     ierr = PetscHSetIAdd(ht, children[i]);CHKERRQ(ierr);
2632   }
2633   PetscFunctionReturn(0);
2634 }
2635 
2636 static PetscErrorCode DMPlexAddClosureTree_Private(DM dm, PetscHSetI ht, PetscInt point)
2637 {
2638   const PetscInt *cone;
2639   PetscInt       coneSize, c;
2640   PetscErrorCode ierr;
2641 
2642   PetscFunctionBeginHot;
2643   ierr = PetscHSetIAdd(ht, point);CHKERRQ(ierr);
2644   ierr = DMPlexAddClosureTree_Up_Private(dm, ht, point);CHKERRQ(ierr);
2645   ierr = DMPlexAddClosureTree_Down_Private(dm, ht, point);CHKERRQ(ierr);
2646   ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
2647   ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
2648   for (c = 0; c < coneSize; c++) {
2649     ierr = DMPlexAddClosureTree_Private(dm, ht, cone[c]);CHKERRQ(ierr);
2650   }
2651   PetscFunctionReturn(0);
2652 }
2653 
/* Compute the sorted union of the transitive closures of points[0..numPoints),
   returned as a sequential IS. Uses the tree-aware traversal when the mesh has a
   parent/child (non-conforming) structure. */
PetscErrorCode DMPlexClosurePoints_Private(DM dm, PetscInt numPoints, const PetscInt points[], IS *closureIS)
{
  DM_Plex         *mesh = (DM_Plex *)dm->data;
  const PetscBool hasTree = (mesh->parentSection || mesh->childSection) ? PETSC_TRUE : PETSC_FALSE;
  PetscInt        nelems, *elems, off = 0, p;
  PetscHSetI      ht;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
  /* Pre-size the set; 16 closure points per input point is a heuristic guess */
  ierr = PetscHSetIResize(ht, numPoints*16);CHKERRQ(ierr);
  if (!hasTree) {
    for (p = 0; p < numPoints; ++p) {
      ierr = DMPlexAddClosure_Private(dm, ht, points[p]);CHKERRQ(ierr);
    }
  } else {
#if 1
    for (p = 0; p < numPoints; ++p) {
      ierr = DMPlexAddClosureTree_Private(dm, ht, points[p]);CHKERRQ(ierr);
    }
#else
    /* Alternative implementation via DMPlexGetTransitiveClosure, kept for reference */
    PetscInt  *closure = NULL, closureSize, c;
    for (p = 0; p < numPoints; ++p) {
      ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      for (c = 0; c < closureSize*2; c += 2) {
        ierr = PetscHSetIAdd(ht, closure[c]);CHKERRQ(ierr);
        if (hasTree) {ierr = DMPlexAddClosure_Tree(dm, ht, closure[c], PETSC_TRUE, PETSC_TRUE);CHKERRQ(ierr);}
      }
    }
    if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure);CHKERRQ(ierr);}
#endif
  }
  /* Extract, sort, and hand the element array to the IS (which takes ownership) */
  ierr = PetscHSetIGetSize(ht, &nelems);CHKERRQ(ierr);
  ierr = PetscMalloc1(nelems, &elems);CHKERRQ(ierr);
  ierr = PetscHSetIGetElems(ht, &off, elems);CHKERRQ(ierr);
  ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
  ierr = PetscSortInt(nelems, elems);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF, nelems, elems, PETSC_OWN_POINTER, closureIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2694 
2695 /*@
2696   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
2697 
2698   Input Parameters:
2699 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2701 
2702   Level: developer
2703 
2704 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2705 @*/
2706 PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
2707 {
2708   IS              rankIS,   pointIS, closureIS;
2709   const PetscInt *ranks,   *points;
2710   PetscInt        numRanks, numPoints, r;
2711   PetscErrorCode  ierr;
2712 
2713   PetscFunctionBegin;
2714   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2715   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2716   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2717   for (r = 0; r < numRanks; ++r) {
2718     const PetscInt rank = ranks[r];
2719     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2720     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2721     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2722     ierr = DMPlexClosurePoints_Private(dm, numPoints, points, &closureIS);CHKERRQ(ierr);
2723     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2724     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2725     ierr = DMLabelSetStratumIS(label, rank, closureIS);CHKERRQ(ierr);
2726     ierr = ISDestroy(&closureIS);CHKERRQ(ierr);
2727   }
2728   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2729   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2730   PetscFunctionReturn(0);
2731 }
2732 
2733 /*@
2734   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
2735 
2736   Input Parameters:
2737 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2739 
2740   Level: developer
2741 
2742 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2743 @*/
2744 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
2745 {
2746   IS              rankIS,   pointIS;
2747   const PetscInt *ranks,   *points;
2748   PetscInt        numRanks, numPoints, r, p, a, adjSize;
2749   PetscInt       *adj = NULL;
2750   PetscErrorCode  ierr;
2751 
2752   PetscFunctionBegin;
2753   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2754   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2755   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2756   for (r = 0; r < numRanks; ++r) {
2757     const PetscInt rank = ranks[r];
2758 
2759     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2760     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2761     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2762     for (p = 0; p < numPoints; ++p) {
2763       adjSize = PETSC_DETERMINE;
2764       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
2765       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
2766     }
2767     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2768     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2769   }
2770   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2771   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2772   ierr = PetscFree(adj);CHKERRQ(ierr);
2773   PetscFunctionReturn(0);
2774 }
2775 
2776 /*@
2777   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
2778 
2779   Input Parameters:
2780 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2782 
2783   Level: developer
2784 
2785   Note: This is required when generating multi-level overlaps to capture
2786   overlap points from non-neighbouring partitions.
2787 
2788 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2789 @*/
PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
{
  MPI_Comm        comm;
  PetscMPIInt     rank;
  PetscSF         sfPoint;
  DMLabel         lblRoots, lblLeaves;
  IS              rankIS, pointIS;
  const PetscInt *ranks;
  PetscInt        numRanks, r;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* Pull point contributions from remote leaves into local roots */
  ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt remoteRank = ranks[r];
    /* Entries targeting this rank itself carry no new information */
    if (remoteRank == rank) continue;
    ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
    ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
  /* Push point contributions from roots into remote leaves (mirror of the pull above) */
  ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt remoteRank = ranks[r];
    if (remoteRank == rank) continue;
    ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
    ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2837 
2838 /*@
2839   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
2840 
2841   Input Parameters:
2842 + dm        - The DM
2843 . rootLabel - DMLabel assinging ranks to local roots
2844 - processSF - A star forest mapping into the local index on each remote rank
2845 
2846   Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots
2848 
2849   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
2850   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
2851 
2852   Level: developer
2853 
2854 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2855 @*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size, r;
  PetscInt           p, n, numNeighbors, numPoints, dof, off, rootSize, l, nleaves, leafSize;
  PetscSF            sfPoint;
  PetscSection       rootSection;
  PetscSFNode       *rootPoints, *leafPoints;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscBool          mpiOverflow = PETSC_FALSE;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_PartLabelInvert,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners: rootSection has chart [0, size) and
     dof(r) = number of local points labeled with target rank r, so the storage is grouped by
     destination rank and can be sent with a single all-to-all */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &rootSize);CHKERRQ(ierr);
  ierr = PetscMalloc1(rootSize, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* If the labeled point is a leaf of the point SF, record its actual owner (point, rank)
         from the SF; otherwise the point is owned here and maps to (point, rank=self) */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }

  /* Try to communicate overlap using All-to-All; falls back to the SF path below if any
     count or displacement would overflow a PetscMPIInt */
  if (!processSF) {
    PetscInt64  counter = 0;
    PetscBool   locOverflow = PETSC_FALSE;
    PetscMPIInt *scounts, *sdispls, *rcounts, *rdispls;

    ierr = PetscCalloc4(size, &scounts, size, &sdispls, size, &rcounts, size, &rdispls);CHKERRQ(ierr);
    for (n = 0; n < numNeighbors; ++n) {
      ierr = PetscSectionGetDof(rootSection, neighbors[n], &dof);CHKERRQ(ierr);
      ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
#if defined(PETSC_USE_64BIT_INDICES)
      if (dof > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
      if (off > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
#endif
      scounts[neighbors[n]] = (PetscMPIInt) dof;
      sdispls[neighbors[n]] = (PetscMPIInt) off;
    }
    /* Exchange counts so each rank can size its receive buffer and build displacements */
    ierr = MPI_Alltoall(scounts, 1, MPI_INT, rcounts, 1, MPI_INT, comm);CHKERRQ(ierr);
    for (r = 0; r < size; ++r) { rdispls[r] = (int)counter; counter += rcounts[r]; }
    if (counter > PETSC_MPI_INT_MAX) locOverflow = PETSC_TRUE;
    /* The overflow decision must be collective so all ranks take the same communication path */
    ierr = MPI_Allreduce(&locOverflow, &mpiOverflow, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
    if (!mpiOverflow) {
      ierr = PetscInfo(dm,"Using Alltoallv for mesh distribution\n");CHKERRQ(ierr);
      leafSize = (PetscInt) counter;
      ierr = PetscMalloc1(leafSize, &leafPoints);CHKERRQ(ierr);
      ierr = MPI_Alltoallv(rootPoints, scounts, sdispls, MPIU_2INT, leafPoints, rcounts, rdispls, MPIU_2INT, comm);CHKERRQ(ierr);
    }
    ierr = PetscFree4(scounts, sdispls, rcounts, rdispls);CHKERRQ(ierr);
  }

  /* Communicate overlap using process star forest (either the caller-supplied one, or an
     all-to-all pattern SF when the MPI path above overflowed) */
  if (processSF || mpiOverflow) {
    PetscSF      procSF;
    PetscSection leafSection;

    if (processSF) {
      ierr = PetscInfo(dm,"Using processSF for mesh distribution\n");CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)processSF);CHKERRQ(ierr);
      procSF = processSF;
    } else {
      ierr = PetscInfo(dm,"Using processSF for mesh distribution (MPI overflow)\n");CHKERRQ(ierr);
      ierr = PetscSFCreate(comm,&procSF);CHKERRQ(ierr);
      ierr = PetscSFSetGraphWithPattern(procSF,NULL,PETSCSF_PATTERN_ALLTOALL);CHKERRQ(ierr);
    }

    ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dm), &leafSection);CHKERRQ(ierr);
    ierr = DMPlexDistributeData(dm, procSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
    ierr = PetscSectionGetStorageSize(leafSection, &leafSize);CHKERRQ(ierr);
    ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&procSF);CHKERRQ(ierr);
  }

  /* Record the received (point, parent-rank) pairs in the output label */
  for (p = 0; p < leafSize; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }

  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_PartLabelInvert,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2973 
2974 /*@
2975   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
2976 
2977   Input Parameters:
2978 + dm    - The DM
- label - DMLabel assigning ranks to remote roots
2980 
2981   Output Parameter:
2982 . sf    - The star forest communication context encapsulating the defined mapping
2983 
2984   Note: The incoming label is a receiver mapping of remote points to their parent rank.
2985 
2986   Level: developer
2987 
2988 .seealso: DMPlexDistribute(), DMPlexCreateOverlap()
2989 @*/
PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
{
  PetscMPIInt     rank;
  PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0, nNeighbors;
  PetscSFNode    *remotePoints;
  IS              remoteRootIS, neighborsIS;
  const PetscInt *remoteRoots, *neighbors;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_PartLabelCreateSF,dm,0,0,0);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);

  ierr = DMLabelGetValueIS(label, &neighborsIS);CHKERRQ(ierr);
  /* Disabled: would sort the neighbor ranks before processing */
#if 0
  {
    IS is;
    ierr = ISDuplicate(neighborsIS, &is);CHKERRQ(ierr);
    ierr = ISSort(is);CHKERRQ(ierr);
    ierr = ISDestroy(&neighborsIS);CHKERRQ(ierr);
    neighborsIS = is;
  }
#endif
  ierr = ISGetLocalSize(neighborsIS, &nNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(neighborsIS, &neighbors);CHKERRQ(ierr);
  /* Total leaf count: one leaf per labeled (point, rank) pair over all strata */
  for (numRemote = 0, n = 0; n < nNeighbors; ++n) {
    ierr = DMLabelGetStratumSize(label, neighbors[n], &numPoints);CHKERRQ(ierr);
    numRemote += numPoints;
  }
  ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
  /* Put owned points first: points labeled with our own rank become self-referencing leaves */
  ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
  if (numPoints > 0) {
    ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
    ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    for (p = 0; p < numPoints; p++) {
      remotePoints[idx].index = remoteRoots[p];
      remotePoints[idx].rank = rank;
      idx++;
    }
    ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
  }
  /* Now add remote points, skipping the own-rank stratum handled above and empty strata */
  for (n = 0; n < nNeighbors; ++n) {
    const PetscInt nn = neighbors[n];

    ierr = DMLabelGetStratumSize(label, nn, &numPoints);CHKERRQ(ierr);
    if (nn == rank || numPoints <= 0) continue;
    ierr = DMLabelGetStratumIS(label, nn, &remoteRootIS);CHKERRQ(ierr);
    ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    for (p = 0; p < numPoints; p++) {
      remotePoints[idx].index = remoteRoots[p];
      remotePoints[idx].rank = nn;
      idx++;
    }
    ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
  }
  /* Leaves are implicitly numbered 0..numRemote-1 (NULL ilocal); roots span the chart */
  ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFSetUp(*sf);CHKERRQ(ierr);
  ierr = ISDestroy(&neighborsIS);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_PartLabelCreateSF,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3057 
3058 /* The two functions below are used by DMPlexRebalanceSharedPoints which errors
3059  * when PETSc is built without ParMETIS. To avoid -Wunused-function, we take
3060  * them out in that case. */
3061 #if defined(PETSC_HAVE_PARMETIS)
3062 /*@C
3063 
3064   DMPlexRewriteSF - Rewrites the ownership of the SF of a DM (in place).
3065 
3066   Input parameters:
3067 + dm                - The DMPlex object.
3068 . n                 - The number of points.
3069 . pointsToRewrite   - The points in the SF whose ownership will change.
3070 . targetOwners      - New owner for each element in pointsToRewrite.
3071 - degrees           - Degrees of the points in the SF as obtained by PetscSFComputeDegreeBegin/PetscSFComputeDegreeEnd.
3072 
3073   Level: developer
3074 
3075 @*/
3076 static PetscErrorCode DMPlexRewriteSF(DM dm, PetscInt n, PetscInt *pointsToRewrite, PetscInt *targetOwners, const PetscInt *degrees)
3077 {
3078   PetscInt      ierr, pStart, pEnd, i, j, counter, leafCounter, sumDegrees, nroots, nleafs;
3079   PetscInt     *cumSumDegrees, *newOwners, *newNumbers, *rankOnLeafs, *locationsOfLeafs, *remoteLocalPointOfLeafs, *points, *leafsNew;
3080   PetscSFNode  *leafLocationsNew;
3081   const         PetscSFNode *iremote;
3082   const         PetscInt *ilocal;
3083   PetscBool    *isLeaf;
3084   PetscSF       sf;
3085   MPI_Comm      comm;
3086   PetscMPIInt   rank, size;
3087 
3088   PetscFunctionBegin;
3089   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
3090   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
3091   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
3092   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
3093 
3094   ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
3095   ierr = PetscSFGetGraph(sf, &nroots, &nleafs, &ilocal, &iremote); CHKERRQ(ierr);
3096   ierr = PetscMalloc1(pEnd-pStart, &isLeaf);CHKERRQ(ierr);
3097   for (i=0; i<pEnd-pStart; i++) {
3098     isLeaf[i] = PETSC_FALSE;
3099   }
3100   for (i=0; i<nleafs; i++) {
3101     isLeaf[ilocal[i]-pStart] = PETSC_TRUE;
3102   }
3103 
3104   ierr = PetscMalloc1(pEnd-pStart+1, &cumSumDegrees);CHKERRQ(ierr);
3105   cumSumDegrees[0] = 0;
3106   for (i=1; i<=pEnd-pStart; i++) {
3107     cumSumDegrees[i] = cumSumDegrees[i-1] + degrees[i-1];
3108   }
3109   sumDegrees = cumSumDegrees[pEnd-pStart];
3110   /* get the location of my leafs (we have sumDegrees many leafs pointing at our roots) */
3111 
3112   ierr = PetscMalloc1(sumDegrees, &locationsOfLeafs);CHKERRQ(ierr);
3113   ierr = PetscMalloc1(pEnd-pStart, &rankOnLeafs);CHKERRQ(ierr);
3114   for (i=0; i<pEnd-pStart; i++) {
3115     rankOnLeafs[i] = rank;
3116   }
3117   ierr = PetscSFGatherBegin(sf, MPIU_INT, rankOnLeafs, locationsOfLeafs);CHKERRQ(ierr);
3118   ierr = PetscSFGatherEnd(sf, MPIU_INT, rankOnLeafs, locationsOfLeafs);CHKERRQ(ierr);
3119   ierr = PetscFree(rankOnLeafs);CHKERRQ(ierr);
3120 
3121   /* get the remote local points of my leaves */
3122   ierr = PetscMalloc1(sumDegrees, &remoteLocalPointOfLeafs);CHKERRQ(ierr);
3123   ierr = PetscMalloc1(pEnd-pStart, &points);CHKERRQ(ierr);
3124   for (i=0; i<pEnd-pStart; i++) {
3125     points[i] = pStart+i;
3126   }
3127   ierr = PetscSFGatherBegin(sf, MPIU_INT, points, remoteLocalPointOfLeafs);CHKERRQ(ierr);
3128   ierr = PetscSFGatherEnd(sf, MPIU_INT, points, remoteLocalPointOfLeafs);CHKERRQ(ierr);
3129   ierr = PetscFree(points);CHKERRQ(ierr);
3130   /* Figure out the new owners of the vertices that are up for grabs and their numbers on the new owners */
3131   ierr = PetscMalloc1(pEnd-pStart, &newOwners);CHKERRQ(ierr);
3132   ierr = PetscMalloc1(pEnd-pStart, &newNumbers);CHKERRQ(ierr);
3133   for (i=0; i<pEnd-pStart; i++) {
3134     newOwners[i] = -1;
3135     newNumbers[i] = -1;
3136   }
3137   {
3138     PetscInt oldNumber, newNumber, oldOwner, newOwner;
3139     for (i=0; i<n; i++) {
3140       oldNumber = pointsToRewrite[i];
3141       newNumber = -1;
3142       oldOwner = rank;
3143       newOwner = targetOwners[i];
3144       if (oldOwner == newOwner) {
3145         newNumber = oldNumber;
3146       } else {
3147         for (j=0; j<degrees[oldNumber]; j++) {
3148           if (locationsOfLeafs[cumSumDegrees[oldNumber]+j] == newOwner) {
3149             newNumber = remoteLocalPointOfLeafs[cumSumDegrees[oldNumber]+j];
3150             break;
3151           }
3152         }
3153       }
3154       if (newNumber == -1) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Couldn't find the new owner of vertex.");
3155 
3156       newOwners[oldNumber] = newOwner;
3157       newNumbers[oldNumber] = newNumber;
3158     }
3159   }
3160   ierr = PetscFree(cumSumDegrees);CHKERRQ(ierr);
3161   ierr = PetscFree(locationsOfLeafs);CHKERRQ(ierr);
3162   ierr = PetscFree(remoteLocalPointOfLeafs);CHKERRQ(ierr);
3163 
3164   ierr = PetscSFBcastBegin(sf, MPIU_INT, newOwners, newOwners);CHKERRQ(ierr);
3165   ierr = PetscSFBcastEnd(sf, MPIU_INT, newOwners, newOwners);CHKERRQ(ierr);
3166   ierr = PetscSFBcastBegin(sf, MPIU_INT, newNumbers, newNumbers);CHKERRQ(ierr);
3167   ierr = PetscSFBcastEnd(sf, MPIU_INT, newNumbers, newNumbers);CHKERRQ(ierr);
3168 
3169   /* Now count how many leafs we have on each processor. */
3170   leafCounter=0;
3171   for (i=0; i<pEnd-pStart; i++) {
3172     if (newOwners[i] >= 0) {
3173       if (newOwners[i] != rank) {
3174         leafCounter++;
3175       }
3176     } else {
3177       if (isLeaf[i]) {
3178         leafCounter++;
3179       }
3180     }
3181   }
3182 
3183   /* Now set up the new sf by creating the leaf arrays */
3184   ierr = PetscMalloc1(leafCounter, &leafsNew);CHKERRQ(ierr);
3185   ierr = PetscMalloc1(leafCounter, &leafLocationsNew);CHKERRQ(ierr);
3186 
3187   leafCounter = 0;
3188   counter = 0;
3189   for (i=0; i<pEnd-pStart; i++) {
3190     if (newOwners[i] >= 0) {
3191       if (newOwners[i] != rank) {
3192         leafsNew[leafCounter] = i;
3193         leafLocationsNew[leafCounter].rank = newOwners[i];
3194         leafLocationsNew[leafCounter].index = newNumbers[i];
3195         leafCounter++;
3196       }
3197     } else {
3198       if (isLeaf[i]) {
3199         leafsNew[leafCounter] = i;
3200         leafLocationsNew[leafCounter].rank = iremote[counter].rank;
3201         leafLocationsNew[leafCounter].index = iremote[counter].index;
3202         leafCounter++;
3203       }
3204     }
3205     if (isLeaf[i]) {
3206       counter++;
3207     }
3208   }
3209 
3210   ierr = PetscSFSetGraph(sf, nroots, leafCounter, leafsNew, PETSC_OWN_POINTER, leafLocationsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
3211   ierr = PetscFree(newOwners);CHKERRQ(ierr);
3212   ierr = PetscFree(newNumbers);CHKERRQ(ierr);
3213   ierr = PetscFree(isLeaf);CHKERRQ(ierr);
3214   PetscFunctionReturn(0);
3215 }
3216 
3217 static PetscErrorCode DMPlexViewDistribution(MPI_Comm comm, PetscInt n, PetscInt skip, PetscInt *vtxwgt, PetscInt *part, PetscViewer viewer)
3218 {
3219   PetscInt *distribution, min, max, sum, i, ierr;
3220   PetscMPIInt rank, size;
3221   PetscFunctionBegin;
3222   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
3223   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
3224   ierr = PetscCalloc1(size, &distribution);CHKERRQ(ierr);
3225   for (i=0; i<n; i++) {
3226     if (part) distribution[part[i]] += vtxwgt[skip*i];
3227     else distribution[rank] += vtxwgt[skip*i];
3228   }
3229   ierr = MPI_Allreduce(MPI_IN_PLACE, distribution, size, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
3230   min = distribution[0];
3231   max = distribution[0];
3232   sum = distribution[0];
3233   for (i=1; i<size; i++) {
3234     if (distribution[i]<min) min=distribution[i];
3235     if (distribution[i]>max) max=distribution[i];
3236     sum += distribution[i];
3237   }
3238   ierr = PetscViewerASCIIPrintf(viewer, "Min: %D, Avg: %D, Max: %D, Balance: %f\n", min, sum/size, max, (max*1.*size)/sum);CHKERRQ(ierr);
3239   ierr = PetscFree(distribution);CHKERRQ(ierr);
3240   PetscFunctionReturn(0);
3241 }
3242 
3243 #endif
3244 
3245 /*@
  DMPlexRebalanceSharedPoints - Redistribute points in the plex that are shared in order to achieve better balancing. This routine updates the PointSF of the DM in place.
3247 
3248   Input parameters:
3249 + dm               - The DMPlex object.
3250 . entityDepth      - depth of the entity to balance (0 -> balance vertices).
3251 . useInitialGuess  - whether to use the current distribution as initial guess (only used by ParMETIS).
3252 - parallel         - whether to use ParMETIS and do the partition in parallel or whether to gather the graph onto a single process and use METIS.
3253 
3254   Output parameters:
3255 . success          - whether the graph partitioning was successful or not. If not, try useInitialGuess=True and parallel=True.
3256 
3257   Level: intermediate
3258 
3259 @*/
3260 
3261 PetscErrorCode DMPlexRebalanceSharedPoints(DM dm, PetscInt entityDepth, PetscBool useInitialGuess, PetscBool parallel, PetscBool *success)
3262 {
3263 #if defined(PETSC_HAVE_PARMETIS)
3264   PetscSF     sf;
3265   PetscInt    ierr, i, j, idx, jdx;
3266   PetscInt    eBegin, eEnd, nroots, nleafs, pStart, pEnd;
3267   const       PetscInt *degrees, *ilocal;
3268   const       PetscSFNode *iremote;
3269   PetscBool   *toBalance, *isLeaf, *isExclusivelyOwned, *isNonExclusivelyOwned;
3270   PetscInt    numExclusivelyOwned, numNonExclusivelyOwned;
3271   PetscMPIInt rank, size;
3272   PetscInt    *globalNumbersOfLocalOwnedVertices, *leafGlobalNumbers;
3273   const       PetscInt *cumSumVertices;
3274   PetscInt    offset, counter;
3275   PetscInt    lenadjncy;
3276   PetscInt    *xadj, *adjncy, *vtxwgt;
3277   PetscInt    lenxadj;
3278   PetscInt    *adjwgt = NULL;
3279   PetscInt    *part, *options;
3280   PetscInt    nparts, wgtflag, numflag, ncon, edgecut;
3281   real_t      *ubvec;
3282   PetscInt    *firstVertices, *renumbering;
3283   PetscInt    failed, failedGlobal;
3284   MPI_Comm    comm;
3285   Mat         A, Apre;
3286   const char *prefix = NULL;
3287   PetscViewer       viewer;
3288   PetscViewerFormat format;
3289   PetscLayout layout;
3290 
3291   PetscFunctionBegin;
3292   if (success) *success = PETSC_FALSE;
3293   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
3294   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
3295   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
3296   if (size==1) PetscFunctionReturn(0);
3297 
3298   ierr = PetscLogEventBegin(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
3299 
3300   ierr = PetscOptionsGetViewer(comm,((PetscObject)dm)->options, prefix,"-dm_rebalance_partition_view",&viewer,&format,NULL);CHKERRQ(ierr);
3301   if (viewer) {
3302     ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr);
3303   }
3304 
3305   /* Figure out all points in the plex that we are interested in balancing. */
3306   ierr = DMPlexGetDepthStratum(dm, entityDepth, &eBegin, &eEnd);CHKERRQ(ierr);
3307   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
3308   ierr = PetscMalloc1(pEnd-pStart, &toBalance);CHKERRQ(ierr);
3309 
3310   for (i=0; i<pEnd-pStart; i++) {
3311     toBalance[i] = (PetscBool)(i-pStart>=eBegin && i-pStart<eEnd);
3312   }
3313 
3314   /* There are three types of points:
3315    * exclusivelyOwned: points that are owned by this process and only seen by this process
3316    * nonExclusivelyOwned: points that are owned by this process but seen by at least another process
3317    * leaf: a point that is seen by this process but owned by a different process
3318    */
3319   ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
3320   ierr = PetscSFGetGraph(sf, &nroots, &nleafs, &ilocal, &iremote); CHKERRQ(ierr);
3321   ierr = PetscMalloc1(pEnd-pStart, &isLeaf);CHKERRQ(ierr);
3322   ierr = PetscMalloc1(pEnd-pStart, &isNonExclusivelyOwned);CHKERRQ(ierr);
3323   ierr = PetscMalloc1(pEnd-pStart, &isExclusivelyOwned);CHKERRQ(ierr);
3324   for (i=0; i<pEnd-pStart; i++) {
3325     isNonExclusivelyOwned[i] = PETSC_FALSE;
3326     isExclusivelyOwned[i] = PETSC_FALSE;
3327     isLeaf[i] = PETSC_FALSE;
3328   }
3329 
3330   /* start by marking all the leafs */
3331   for (i=0; i<nleafs; i++) {
3332     isLeaf[ilocal[i]-pStart] = PETSC_TRUE;
3333   }
3334 
3335   /* for an owned point, we can figure out whether another processor sees it or
3336    * not by calculating its degree */
3337   ierr = PetscSFComputeDegreeBegin(sf, &degrees);CHKERRQ(ierr);
3338   ierr = PetscSFComputeDegreeEnd(sf, &degrees);CHKERRQ(ierr);
3339 
3340   numExclusivelyOwned = 0;
3341   numNonExclusivelyOwned = 0;
3342   for (i=0; i<pEnd-pStart; i++) {
3343     if (toBalance[i]) {
3344       if (degrees[i] > 0) {
3345         isNonExclusivelyOwned[i] = PETSC_TRUE;
3346         numNonExclusivelyOwned += 1;
3347       } else {
3348         if (!isLeaf[i]) {
3349           isExclusivelyOwned[i] = PETSC_TRUE;
3350           numExclusivelyOwned += 1;
3351         }
3352       }
3353     }
3354   }
3355 
3356   /* We are going to build a graph with one vertex per core representing the
3357    * exclusively owned points and then one vertex per nonExclusively owned
3358    * point. */
3359 
3360   ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
3361   ierr = PetscLayoutSetLocalSize(layout, 1 + numNonExclusivelyOwned);CHKERRQ(ierr);
3362   ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
3363   ierr = PetscLayoutGetRanges(layout, &cumSumVertices);CHKERRQ(ierr);
3364 
3365   ierr = PetscMalloc1(pEnd-pStart, &globalNumbersOfLocalOwnedVertices);CHKERRQ(ierr);
3366   for (i=0; i<pEnd-pStart; i++) {globalNumbersOfLocalOwnedVertices[i] = pStart - 1;}
3367   offset = cumSumVertices[rank];
3368   counter = 0;
3369   for (i=0; i<pEnd-pStart; i++) {
3370     if (toBalance[i]) {
3371       if (degrees[i] > 0) {
3372         globalNumbersOfLocalOwnedVertices[i] = counter + 1 + offset;
3373         counter++;
3374       }
3375     }
3376   }
3377 
3378   /* send the global numbers of vertices I own to the leafs so that they know to connect to it */
3379   ierr = PetscMalloc1(pEnd-pStart, &leafGlobalNumbers);CHKERRQ(ierr);
3380   ierr = PetscSFBcastBegin(sf, MPIU_INT, globalNumbersOfLocalOwnedVertices, leafGlobalNumbers);CHKERRQ(ierr);
3381   ierr = PetscSFBcastEnd(sf, MPIU_INT, globalNumbersOfLocalOwnedVertices, leafGlobalNumbers);CHKERRQ(ierr);
3382 
3383   /* Now start building the data structure for ParMETIS */
3384 
3385   ierr = MatCreate(comm, &Apre);CHKERRQ(ierr);
3386   ierr = MatSetType(Apre, MATPREALLOCATOR);CHKERRQ(ierr);
3387   ierr = MatSetSizes(Apre, 1+numNonExclusivelyOwned, 1+numNonExclusivelyOwned, cumSumVertices[size], cumSumVertices[size]);CHKERRQ(ierr);
3388   ierr = MatSetUp(Apre);CHKERRQ(ierr);
3389 
3390   for (i=0; i<pEnd-pStart; i++) {
3391     if (toBalance[i]) {
3392       idx = cumSumVertices[rank];
3393       if (isNonExclusivelyOwned[i]) jdx = globalNumbersOfLocalOwnedVertices[i];
3394       else if (isLeaf[i]) jdx = leafGlobalNumbers[i];
3395       else continue;
3396       ierr = MatSetValue(Apre, idx, jdx, 1, INSERT_VALUES);CHKERRQ(ierr);
3397       ierr = MatSetValue(Apre, jdx, idx, 1, INSERT_VALUES);CHKERRQ(ierr);
3398     }
3399   }
3400 
3401   ierr = MatAssemblyBegin(Apre, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3402   ierr = MatAssemblyEnd(Apre, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3403 
3404   ierr = MatCreate(comm, &A);CHKERRQ(ierr);
3405   ierr = MatSetType(A, MATMPIAIJ);CHKERRQ(ierr);
3406   ierr = MatSetSizes(A, 1+numNonExclusivelyOwned, 1+numNonExclusivelyOwned, cumSumVertices[size], cumSumVertices[size]);CHKERRQ(ierr);
3407   ierr = MatPreallocatorPreallocate(Apre, PETSC_FALSE, A);CHKERRQ(ierr);
3408   ierr = MatDestroy(&Apre);CHKERRQ(ierr);
3409 
3410   for (i=0; i<pEnd-pStart; i++) {
3411     if (toBalance[i]) {
3412       idx = cumSumVertices[rank];
3413       if (isNonExclusivelyOwned[i]) jdx = globalNumbersOfLocalOwnedVertices[i];
3414       else if (isLeaf[i]) jdx = leafGlobalNumbers[i];
3415       else continue;
3416       ierr = MatSetValue(A, idx, jdx, 1, INSERT_VALUES);CHKERRQ(ierr);
3417       ierr = MatSetValue(A, jdx, idx, 1, INSERT_VALUES);CHKERRQ(ierr);
3418     }
3419   }
3420 
3421   ierr = MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3422   ierr = MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3423   ierr = PetscFree(leafGlobalNumbers);CHKERRQ(ierr);
3424   ierr = PetscFree(globalNumbersOfLocalOwnedVertices);CHKERRQ(ierr);
3425 
3426   nparts = size;
3427   wgtflag = 2;
3428   numflag = 0;
3429   ncon = 2;
3430   real_t *tpwgts;
3431   ierr = PetscMalloc1(ncon * nparts, &tpwgts);CHKERRQ(ierr);
3432   for (i=0; i<ncon*nparts; i++) {
3433     tpwgts[i] = 1./(nparts);
3434   }
3435 
3436   ierr = PetscMalloc1(ncon, &ubvec);CHKERRQ(ierr);
3437   ubvec[0] = 1.01;
3438   ubvec[1] = 1.01;
3439   lenadjncy = 0;
3440   for (i=0; i<1+numNonExclusivelyOwned; i++) {
3441     PetscInt temp=0;
3442     ierr = MatGetRow(A, cumSumVertices[rank] + i, &temp, NULL, NULL);CHKERRQ(ierr);
3443     lenadjncy += temp;
3444     ierr = MatRestoreRow(A, cumSumVertices[rank] + i, &temp, NULL, NULL);CHKERRQ(ierr);
3445   }
3446   ierr = PetscMalloc1(lenadjncy, &adjncy);CHKERRQ(ierr);
3447   lenxadj = 2 + numNonExclusivelyOwned;
3448   ierr = PetscMalloc1(lenxadj, &xadj);CHKERRQ(ierr);
3449   xadj[0] = 0;
3450   counter = 0;
3451   for (i=0; i<1+numNonExclusivelyOwned; i++) {
3452     PetscInt        temp=0;
3453     const PetscInt *cols;
3454     ierr = MatGetRow(A, cumSumVertices[rank] + i, &temp, &cols, NULL);CHKERRQ(ierr);
3455     ierr = PetscArraycpy(&adjncy[counter], cols, temp);CHKERRQ(ierr);
3456     counter += temp;
3457     xadj[i+1] = counter;
3458     ierr = MatRestoreRow(A, cumSumVertices[rank] + i, &temp, &cols, NULL);CHKERRQ(ierr);
3459   }
3460 
3461   ierr = PetscMalloc1(cumSumVertices[rank+1]-cumSumVertices[rank], &part);CHKERRQ(ierr);
3462   ierr = PetscMalloc1(ncon*(1 + numNonExclusivelyOwned), &vtxwgt);CHKERRQ(ierr);
3463   vtxwgt[0] = numExclusivelyOwned;
3464   if (ncon>1) vtxwgt[1] = 1;
3465   for (i=0; i<numNonExclusivelyOwned; i++) {
3466     vtxwgt[ncon*(i+1)] = 1;
3467     if (ncon>1) vtxwgt[ncon*(i+1)+1] = 0;
3468   }
3469 
3470   if (viewer) {
3471     ierr = PetscViewerASCIIPrintf(viewer, "Attempt rebalancing of shared points of depth %D on interface of mesh distribution.\n", entityDepth);CHKERRQ(ierr);
3472     ierr = PetscViewerASCIIPrintf(viewer, "Size of generated auxiliary graph: %D\n", cumSumVertices[size]);CHKERRQ(ierr);
3473   }
3474   if (parallel) {
3475     ierr = PetscMalloc1(4, &options);CHKERRQ(ierr);
3476     options[0] = 1;
3477     options[1] = 0; /* Verbosity */
3478     options[2] = 0; /* Seed */
3479     options[3] = PARMETIS_PSR_COUPLED; /* Seed */
3480     if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using ParMETIS to partition graph.\n");CHKERRQ(ierr); }
3481     if (useInitialGuess) {
3482       if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using current distribution of points as initial guess.\n");CHKERRQ(ierr); }
3483       PetscStackPush("ParMETIS_V3_RefineKway");
3484       ierr = ParMETIS_V3_RefineKway((PetscInt*)cumSumVertices, xadj, adjncy, vtxwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgecut, part, &comm);
3485       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_RefineKway()");
3486       PetscStackPop;
3487     } else {
3488       PetscStackPush("ParMETIS_V3_PartKway");
3489       ierr = ParMETIS_V3_PartKway((PetscInt*)cumSumVertices, xadj, adjncy, vtxwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgecut, part, &comm);
3490       PetscStackPop;
3491       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
3492     }
3493     ierr = PetscFree(options);CHKERRQ(ierr);
3494   } else {
3495     if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using METIS to partition graph.\n");CHKERRQ(ierr); }
3496     Mat As;
3497     PetscInt numRows;
3498     PetscInt *partGlobal;
3499     ierr = MatCreateRedundantMatrix(A, size, MPI_COMM_NULL, MAT_INITIAL_MATRIX, &As);CHKERRQ(ierr);
3500 
3501     PetscInt *numExclusivelyOwnedAll;
3502     ierr = PetscMalloc1(size, &numExclusivelyOwnedAll);CHKERRQ(ierr);
3503     numExclusivelyOwnedAll[rank] = numExclusivelyOwned;
3504     ierr = MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,numExclusivelyOwnedAll,1,MPIU_INT,comm);CHKERRQ(ierr);
3505 
3506     ierr = MatGetSize(As, &numRows, NULL);CHKERRQ(ierr);
3507     ierr = PetscMalloc1(numRows, &partGlobal);CHKERRQ(ierr);
3508     if (!rank) {
3509       PetscInt *adjncy_g, *xadj_g, *vtxwgt_g;
3510       lenadjncy = 0;
3511 
3512       for (i=0; i<numRows; i++) {
3513         PetscInt temp=0;
3514         ierr = MatGetRow(As, i, &temp, NULL, NULL);CHKERRQ(ierr);
3515         lenadjncy += temp;
3516         ierr = MatRestoreRow(As, i, &temp, NULL, NULL);CHKERRQ(ierr);
3517       }
3518       ierr = PetscMalloc1(lenadjncy, &adjncy_g);CHKERRQ(ierr);
3519       lenxadj = 1 + numRows;
3520       ierr = PetscMalloc1(lenxadj, &xadj_g);CHKERRQ(ierr);
3521       xadj_g[0] = 0;
3522       counter = 0;
3523       for (i=0; i<numRows; i++) {
3524         PetscInt        temp=0;
3525         const PetscInt *cols;
3526         ierr = MatGetRow(As, i, &temp, &cols, NULL);CHKERRQ(ierr);
3527         ierr = PetscArraycpy(&adjncy_g[counter], cols, temp);CHKERRQ(ierr);
3528         counter += temp;
3529         xadj_g[i+1] = counter;
3530         ierr = MatRestoreRow(As, i, &temp, &cols, NULL);CHKERRQ(ierr);
3531       }
3532       ierr = PetscMalloc1(2*numRows, &vtxwgt_g);CHKERRQ(ierr);
3533       for (i=0; i<size; i++){
3534         vtxwgt_g[ncon*cumSumVertices[i]] = numExclusivelyOwnedAll[i];
3535         if (ncon>1) vtxwgt_g[ncon*cumSumVertices[i]+1] = 1;
3536         for (j=cumSumVertices[i]+1; j<cumSumVertices[i+1]; j++) {
3537           vtxwgt_g[ncon*j] = 1;
3538           if (ncon>1) vtxwgt_g[2*j+1] = 0;
3539         }
3540       }
3541       ierr = PetscMalloc1(64, &options);CHKERRQ(ierr);
3542       ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
3543       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
3544       options[METIS_OPTION_CONTIG] = 1;
3545       PetscStackPush("METIS_PartGraphKway");
3546       ierr = METIS_PartGraphKway(&numRows, &ncon, xadj_g, adjncy_g, vtxwgt_g, NULL, NULL, &nparts, tpwgts, ubvec, options, &edgecut, partGlobal);
3547       PetscStackPop;
3548       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
3549       ierr = PetscFree(options);CHKERRQ(ierr);
3550       ierr = PetscFree(xadj_g);CHKERRQ(ierr);
3551       ierr = PetscFree(adjncy_g);CHKERRQ(ierr);
3552       ierr = PetscFree(vtxwgt_g);CHKERRQ(ierr);
3553     }
3554     ierr = PetscFree(numExclusivelyOwnedAll);CHKERRQ(ierr);
3555 
3556     /* Now scatter the parts array. */
3557     {
3558       PetscMPIInt *counts, *mpiCumSumVertices;
3559       ierr = PetscMalloc1(size, &counts);CHKERRQ(ierr);
3560       ierr = PetscMalloc1(size+1, &mpiCumSumVertices);CHKERRQ(ierr);
3561       for(i=0; i<size; i++) {
3562         ierr = PetscMPIIntCast(cumSumVertices[i+1] - cumSumVertices[i], &(counts[i]));CHKERRQ(ierr);
3563       }
3564       for(i=0; i<=size; i++) {
3565         ierr = PetscMPIIntCast(cumSumVertices[i], &(mpiCumSumVertices[i]));CHKERRQ(ierr);
3566       }
3567       ierr = MPI_Scatterv(partGlobal, counts, mpiCumSumVertices, MPIU_INT, part, counts[rank], MPIU_INT, 0, comm);CHKERRQ(ierr);
3568       ierr = PetscFree(counts);CHKERRQ(ierr);
3569       ierr = PetscFree(mpiCumSumVertices);CHKERRQ(ierr);
3570     }
3571 
3572     ierr = PetscFree(partGlobal);CHKERRQ(ierr);
3573     ierr = MatDestroy(&As);CHKERRQ(ierr);
3574   }
3575 
3576   ierr = MatDestroy(&A);CHKERRQ(ierr);
3577   ierr = PetscFree(ubvec);CHKERRQ(ierr);
3578   ierr = PetscFree(tpwgts);CHKERRQ(ierr);
3579 
3580   /* Now rename the result so that the vertex resembling the exclusively owned points stays on the same rank */
3581 
3582   ierr = PetscMalloc1(size, &firstVertices);CHKERRQ(ierr);
3583   ierr = PetscMalloc1(size, &renumbering);CHKERRQ(ierr);
3584   firstVertices[rank] = part[0];
3585   ierr = MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,firstVertices,1,MPIU_INT,comm);CHKERRQ(ierr);
3586   for (i=0; i<size; i++) {
3587     renumbering[firstVertices[i]] = i;
3588   }
3589   for (i=0; i<cumSumVertices[rank+1]-cumSumVertices[rank]; i++) {
3590     part[i] = renumbering[part[i]];
3591   }
3592   /* Check if the renumbering worked (this can fail when ParMETIS gives fewer partitions than there are processes) */
3593   failed = (PetscInt)(part[0] != rank);
3594   ierr = MPI_Allreduce(&failed, &failedGlobal, 1, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
3595 
3596   ierr = PetscFree(firstVertices);CHKERRQ(ierr);
3597   ierr = PetscFree(renumbering);CHKERRQ(ierr);
3598 
3599   if (failedGlobal > 0) {
3600     ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
3601     ierr = PetscFree(xadj);CHKERRQ(ierr);
3602     ierr = PetscFree(adjncy);CHKERRQ(ierr);
3603     ierr = PetscFree(vtxwgt);CHKERRQ(ierr);
3604     ierr = PetscFree(toBalance);CHKERRQ(ierr);
3605     ierr = PetscFree(isLeaf);CHKERRQ(ierr);
3606     ierr = PetscFree(isNonExclusivelyOwned);CHKERRQ(ierr);
3607     ierr = PetscFree(isExclusivelyOwned);CHKERRQ(ierr);
3608     ierr = PetscFree(part);CHKERRQ(ierr);
3609     if (viewer) {
3610       ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
3611       ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
3612     }
3613     ierr = PetscLogEventEnd(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
3614     PetscFunctionReturn(0);
3615   }
3616 
3617   /*Let's check how well we did distributing points*/
3618   if (viewer) {
3619     ierr = PetscViewerASCIIPrintf(viewer, "Comparing number of owned entities of depth %D on each process before rebalancing, after rebalancing, and after consistency checks.\n", entityDepth);CHKERRQ(ierr);
3620     ierr = PetscViewerASCIIPrintf(viewer, "Initial.     ");CHKERRQ(ierr);
3621     ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, NULL, viewer);CHKERRQ(ierr);
3622     ierr = PetscViewerASCIIPrintf(viewer, "Rebalanced.  ");CHKERRQ(ierr);
3623     ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, part, viewer);CHKERRQ(ierr);
3624   }
3625 
3626   /* Now check that every vertex is owned by a process that it is actually connected to. */
3627   for (i=1; i<=numNonExclusivelyOwned; i++) {
3628     PetscInt loc = 0;
3629     ierr = PetscFindInt(cumSumVertices[part[i]], xadj[i+1]-xadj[i], &adjncy[xadj[i]], &loc);CHKERRQ(ierr);
3630     /* If not, then just set the owner to the original owner (hopefully a rare event, it means that a vertex has been isolated) */
3631     if (loc<0) {
3632       part[i] = rank;
3633     }
3634   }
3635 
3636   /* Let's see how significant the influences of the previous fixing up step was.*/
3637   if (viewer) {
3638     ierr = PetscViewerASCIIPrintf(viewer, "After.       ");CHKERRQ(ierr);
3639     ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, part, viewer);CHKERRQ(ierr);
3640   }
3641 
3642   ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
3643   ierr = PetscFree(xadj);CHKERRQ(ierr);
3644   ierr = PetscFree(adjncy);CHKERRQ(ierr);
3645   ierr = PetscFree(vtxwgt);CHKERRQ(ierr);
3646 
3647   /* Almost done, now rewrite the SF to reflect the new ownership. */
3648   {
3649     PetscInt *pointsToRewrite;
3650     ierr = PetscMalloc1(numNonExclusivelyOwned, &pointsToRewrite);CHKERRQ(ierr);
3651     counter = 0;
3652     for(i=0; i<pEnd-pStart; i++) {
3653       if (toBalance[i]) {
3654         if (isNonExclusivelyOwned[i]) {
3655           pointsToRewrite[counter] = i + pStart;
3656           counter++;
3657         }
3658       }
3659     }
3660     ierr = DMPlexRewriteSF(dm, numNonExclusivelyOwned, pointsToRewrite, part+1, degrees);CHKERRQ(ierr);
3661     ierr = PetscFree(pointsToRewrite);CHKERRQ(ierr);
3662   }
3663 
3664   ierr = PetscFree(toBalance);CHKERRQ(ierr);
3665   ierr = PetscFree(isLeaf);CHKERRQ(ierr);
3666   ierr = PetscFree(isNonExclusivelyOwned);CHKERRQ(ierr);
3667   ierr = PetscFree(isExclusivelyOwned);CHKERRQ(ierr);
3668   ierr = PetscFree(part);CHKERRQ(ierr);
3669   if (viewer) {
3670     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
3671     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
3672   }
3673   if (success) *success = PETSC_TRUE;
3674   ierr = PetscLogEventEnd(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
3675 #else
3676   SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
3677 #endif
3678   PetscFunctionReturn(0);
3679 }
3680