xref: /petsc/src/dm/impls/plex/plexpartition.c (revision ef0bb6c736604ce380bf8bea4ebd4a7bda431d97)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/hashseti.h>
3 
/* Class id under which PetscPartitioner objects are registered and logged */
PetscClassId PETSCPARTITIONER_CLASSID = 0;

/* Registry of PetscPartitioner constructors, filled by PetscPartitionerRegister(),
   and a flag recording whether the built-in implementations have been registered */
PetscFunctionList PetscPartitionerList              = NULL;
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
8 
9 PetscBool ChacoPartitionercite = PETSC_FALSE;
10 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
11                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
12                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
13                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
14                                "  isbn      = {0-89791-816-9},\n"
15                                "  pages     = {28},\n"
16                                "  doi       = {https://doi.acm.org/10.1145/224170.224228},\n"
17                                "  publisher = {ACM Press},\n"
18                                "  address   = {New York},\n"
19                                "  year      = {1995}\n}\n";
20 
/* Citation registration flag and BibTeX entry for the ParMetis partitioner package */
PetscBool ParMetisPartitionercite = PETSC_FALSE;
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";
29 
/* Citation registration flag and BibTeX entry for the PT-Scotch partitioner package */
PetscBool PTScotchPartitionercite = PETSC_FALSE;
const char PTScotchPartitionerCitation[] =
  "@article{PTSCOTCH,\n"
  "  author  = {C. Chevalier and F. Pellegrini},\n"
  "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
  "  journal = {Parallel Computing},\n"
  "  volume  = {34},\n"
  "  number  = {6},\n"
  "  pages   = {318--331},\n"
  "  year    = {2008},\n"
  "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
  "}\n";
42 
43 
44 PETSC_STATIC_INLINE PetscInt DMPlex_GlobalID(PetscInt point) { return point >= 0 ? point : -(point+1); }
45 
/*
  DMPlexCreatePartitionerGraph_Native - Build the cell-cell adjacency graph in CSR format by
  traversing the mesh directly, using the point SF to also record adjacent cells that live on
  other processes. This is the default implementation behind DMPlexCreatePartitionerGraph().

  Input Parameters:
+ dm     - The mesh DM (the parallel path requires an interpolated mesh)
- height - Height of the strata treated as graph vertices (0 for cells)

  Output Parameters:
+ numVertices     - Number of owned vertices (cells) in the local graph
. offsets         - CSR row offsets, length *numVertices+1; caller frees with PetscFree()
. adjacency       - CSR column entries holding global cell ids; caller frees with PetscFree()
- globalNumbering - Optional global cell numbering; negative entries mark cells owned elsewhere
*/
static PetscErrorCode DMPlexCreatePartitionerGraph_Native(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
{
  PetscInt       dim, depth, p, pStart, pEnd, a, adjSize, idx, size;
  PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
  IS             cellNumbering;
  const PetscInt *cellNum;
  PetscBool      useCone, useClosure;
  PetscSection   section;
  PetscSegBuffer adjBuffer;
  PetscSF        sfPoint;
  PetscInt       *adjCells = NULL, *remoteCells = NULL;
  const PetscInt *local;
  PetscInt       nroots, nleaves, l;
  PetscMPIInt    rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  if (dim != depth) {
    /* We do not handle the uninterpolated case here */
    ierr = DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);CHKERRQ(ierr);
    /* DMPlexCreateNeighborCSR does not make a numbering */
    if (globalNumbering) {ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);CHKERRQ(ierr);}
    /* Different behavior for empty graphs */
    if (!*numVertices) {
      ierr = PetscMalloc1(1, offsets);CHKERRQ(ierr);
      (*offsets)[0] = 0;
    }
    /* Broken in parallel */
    if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported");
    PetscFunctionReturn(0);
  }
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
  /* Build adjacency graph via a section/segbuffer */
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
  /* Always use FVM adjacency to create partitioner graph */
  ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
  ierr = DMSetBasicAdjacency(dm, PETSC_TRUE, PETSC_FALSE);CHKERRQ(ierr);
  ierr = DMPlexCreateNumbering_Plex(dm, pStart, pEnd, 0, NULL, sfPoint, &cellNumbering);CHKERRQ(ierr);
  if (globalNumbering) {
    /* Hand the caller its own reference to the cell numbering */
    ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
    *globalNumbering = cellNumbering;
  }
  ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  /* For all boundary faces (including faces adjacent to a ghost cell), record the local cell in adjCells
     Broadcast adjCells to remoteCells (to get cells from roots) and Reduce adjCells to remoteCells (to get cells from leaves)
   */
  ierr = PetscSFGetGraph(sfPoint, &nroots, &nleaves, &local, NULL);CHKERRQ(ierr);
  if (nroots >= 0) {
    PetscInt fStart, fEnd, f;

    ierr = PetscCalloc2(nroots, &adjCells, nroots, &remoteCells);CHKERRQ(ierr);
    ierr = DMPlexGetHeightStratum(dm, height+1, &fStart, &fEnd);CHKERRQ(ierr);
    /* -3 marks points with no recorded neighboring cell */
    for (l = 0; l < nroots; ++l) adjCells[l] = -3;
    for (f = fStart; f < fEnd; ++f) {
      const PetscInt *support;
      PetscInt        supportSize;

      ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
      ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
      if (supportSize == 1) adjCells[f] = DMPlex_GlobalID(cellNum[support[0]]);
      else if (supportSize == 2) {
        /* If one support cell is a leaf in the point SF (a shared point), record the other
           cell on the face so the sharing process can learn its global id */
        ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = DMPlex_GlobalID(cellNum[support[1]]);
        ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = DMPlex_GlobalID(cellNum[support[0]]);
      }
      /* Handle non-conforming meshes */
      if (supportSize > 2) {
        PetscInt        numChildren, i;
        const PetscInt *children;

        ierr = DMPlexGetTreeChildren(dm, f, &numChildren, &children);CHKERRQ(ierr);
        for (i = 0; i < numChildren; ++i) {
          const PetscInt child = children[i];
          if (fStart <= child && child < fEnd) {
            ierr = DMPlexGetSupport(dm, child, &support);CHKERRQ(ierr);
            ierr = DMPlexGetSupportSize(dm, child, &supportSize);CHKERRQ(ierr);
            if (supportSize == 1) adjCells[child] = DMPlex_GlobalID(cellNum[support[0]]);
            else if (supportSize == 2) {
              ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr);
              if (p >= 0) adjCells[child] = DMPlex_GlobalID(cellNum[support[1]]);
              ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr);
              if (p >= 0) adjCells[child] = DMPlex_GlobalID(cellNum[support[0]]);
            }
          }
        }
      }
    }
    /* -1 marks "no remote neighbor"; MPI_MAX in the reduce keeps any valid (>= 0) global id */
    for (l = 0; l < nroots; ++l) remoteCells[l] = -1;
    ierr = PetscSFBcastBegin(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
  }
  /* Combine local and global adjacencies */
  for (*numVertices = 0, p = pStart; p < pEnd; p++) {
    /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    /* Add remote cells */
    if (remoteCells) {
      const PetscInt gp = DMPlex_GlobalID(cellNum[p]);
      PetscInt       coneSize, numChildren, c, i;
      const PetscInt *cone, *children;

      ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
      ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
      for (c = 0; c < coneSize; ++c) {
        const PetscInt point = cone[c];
        /* A face carrying a remote cell id different from our own global id is a graph edge */
        if (remoteCells[point] >= 0 && remoteCells[point] != gp) {
          PetscInt *PETSC_RESTRICT pBuf;
          ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
          ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
          *pBuf = remoteCells[point];
        }
        /* Handle non-conforming meshes */
        ierr = DMPlexGetTreeChildren(dm, point, &numChildren, &children);CHKERRQ(ierr);
        for (i = 0; i < numChildren; ++i) {
          const PetscInt child = children[i];
          if (remoteCells[child] >= 0 && remoteCells[child] != gp) {
            PetscInt *PETSC_RESTRICT pBuf;
            ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
            ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
            *pBuf = remoteCells[child];
          }
        }
      }
    }
    /* Add local cells */
    adjSize = PETSC_DETERMINE;
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      const PetscInt point = adj[a];
      if (point != p && pStart <= point && point < pEnd) {
        PetscInt *PETSC_RESTRICT pBuf;
        ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
        ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
        *pBuf = DMPlex_GlobalID(cellNum[point]);
      }
    }
    (*numVertices)++;
  }
  ierr = PetscFree(adj);CHKERRQ(ierr);
  ierr = PetscFree2(adjCells, remoteCells);CHKERRQ(ierr);
  /* Restore the adjacency definition the user had selected */
  ierr = DMSetBasicAdjacency(dm, useCone, useClosure);CHKERRQ(ierr);

  /* Derive CSR graph from section/segbuffer */
  ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
  for (idx = 0, p = pStart; p < pEnd; p++) {
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
  }
  vOffsets[*numVertices] = size;
  ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);

  if (nroots >= 0) {
    /* Filter out duplicate edges using section/segbuffer */
    ierr = PetscSectionReset(section);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(section, 0, *numVertices);CHKERRQ(ierr);
    for (p = 0; p < *numVertices; p++) {
      PetscInt start = vOffsets[p], end = vOffsets[p+1];
      PetscInt numEdges = end-start, *PETSC_RESTRICT edges;
      ierr = PetscSortRemoveDupsInt(&numEdges, &graph[start]);CHKERRQ(ierr);
      ierr = PetscSectionSetDof(section, p, numEdges);CHKERRQ(ierr);
      ierr = PetscSegBufferGetInts(adjBuffer, numEdges, &edges);CHKERRQ(ierr);
      ierr = PetscArraycpy(edges, &graph[start], numEdges);CHKERRQ(ierr);
    }
    ierr = PetscFree(vOffsets);CHKERRQ(ierr);
    ierr = PetscFree(graph);CHKERRQ(ierr);
    /* Derive CSR graph from section/segbuffer */
    ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
    ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
    ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
    for (idx = 0, p = 0; p < *numVertices; p++) {
      ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
    }
    vOffsets[*numVertices] = size;
    ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
  } else {
    /* Sort adjacencies (not strictly necessary) */
    for (p = 0; p < *numVertices; p++) {
      PetscInt start = vOffsets[p], end = vOffsets[p+1];
      ierr = PetscSortInt(end-start, &graph[start]);CHKERRQ(ierr);
    }
  }

  if (offsets) *offsets = vOffsets;
  if (adjacency) *adjacency = graph;

  /* Cleanup */
  ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
  ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
249 
/*
  DMPlexCreatePartitionerGraph_ViaMat - Build the cell-cell adjacency graph in CSR format by
  assembling a face-cell connectivity matrix and forming conn^T * conn, whose off-diagonal
  sparsity pattern is the dual graph. Selected at runtime with -dm_plex_csr_via_mat.

  Input and output parameters mirror DMPlexCreatePartitionerGraph(); offsets and adjacency
  are freed by the caller with PetscFree(), globalNumbering with ISDestroy().
*/
static PetscErrorCode DMPlexCreatePartitionerGraph_ViaMat(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
{
  Mat            conn, CSR;
  IS             fis, cis, cis_own;
  PetscSF        sfPoint;
  const PetscInt *rows, *cols, *ii, *jj;
  PetscInt       *idxs,*idxs2;
  PetscInt       dim, depth, floc, cloc, i, M, N, c, lm, m, cStart, cEnd, fStart, fEnd;
  PetscMPIInt    rank;
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  if (dim != depth) {
    /* We do not handle the uninterpolated case here */
    ierr = DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);CHKERRQ(ierr);
    /* DMPlexCreateNeighborCSR does not make a numbering */
    if (globalNumbering) {ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);CHKERRQ(ierr);}
    /* Different behavior for empty graphs */
    if (!*numVertices) {
      ierr = PetscMalloc1(1, offsets);CHKERRQ(ierr);
      (*offsets)[0] = 0;
    }
    /* Broken in parallel */
    if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported");
    PetscFunctionReturn(0);
  }
  /* Interpolated and parallel case */

  /* numbering: M/N are the global face/cell counts, fis/cis the point numberings */
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &cStart, &cEnd);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height+1, &fStart, &fEnd);CHKERRQ(ierr);
  ierr = DMPlexCreateNumbering_Plex(dm, cStart, cEnd, 0, &N, sfPoint, &cis);CHKERRQ(ierr);
  ierr = DMPlexCreateNumbering_Plex(dm, fStart, fEnd, 0, &M, sfPoint, &fis);CHKERRQ(ierr);
  if (globalNumbering) {
    ierr = ISDuplicate(cis, globalNumbering);CHKERRQ(ierr);
  }

  /* get positive global ids and local sizes for facets and cells;
     negative entries encode unowned points as -(id+1) */
  ierr = ISGetLocalSize(fis, &m);CHKERRQ(ierr);
  ierr = ISGetIndices(fis, &rows);CHKERRQ(ierr);
  ierr = PetscMalloc1(m, &idxs);CHKERRQ(ierr);
  for (i = 0, floc = 0; i < m; i++) {
    const PetscInt p = rows[i];

    if (p < 0) {
      idxs[i] = -(p+1);
    } else {
      idxs[i] = p;
      floc   += 1; /* count of locally owned faces */
    }
  }
  ierr = ISRestoreIndices(fis, &rows);CHKERRQ(ierr);
  ierr = ISDestroy(&fis);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF, m, idxs, PETSC_OWN_POINTER, &fis);CHKERRQ(ierr);

  ierr = ISGetLocalSize(cis, &m);CHKERRQ(ierr);
  ierr = ISGetIndices(cis, &cols);CHKERRQ(ierr);
  ierr = PetscMalloc1(m, &idxs);CHKERRQ(ierr);
  ierr = PetscMalloc1(m, &idxs2);CHKERRQ(ierr);
  for (i = 0, cloc = 0; i < m; i++) {
    const PetscInt p = cols[i];

    if (p < 0) {
      idxs[i] = -(p+1);
    } else {
      idxs[i]       = p;
      idxs2[cloc++] = p; /* global ids of owned cells only */
    }
  }
  ierr = ISRestoreIndices(cis, &cols);CHKERRQ(ierr);
  ierr = ISDestroy(&cis);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), m, idxs, PETSC_OWN_POINTER, &cis);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), cloc, idxs2, PETSC_OWN_POINTER, &cis_own);CHKERRQ(ierr);

  /* Create matrix to hold F-C connectivity (MatMatTranspose Mult not supported for MPIAIJ) */
  ierr = MatCreate(PetscObjectComm((PetscObject)dm), &conn);CHKERRQ(ierr);
  ierr = MatSetSizes(conn, floc, cloc, M, N);CHKERRQ(ierr);
  ierr = MatSetType(conn, MATMPIAIJ);CHKERRQ(ierr);
  ierr = DMPlexGetMaxSizes(dm, NULL, &lm);CHKERRQ(ierr);
  /* Preallocation bound: sum of the per-rank maximum sizes across the communicator */
  ierr = MPI_Allreduce(&lm, &m, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject) dm));CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(conn, m, NULL, m, NULL);CHKERRQ(ierr);

  /* Assemble matrix: entry (f, c) = 1 when face f lies on the boundary of cell c */
  ierr = ISGetIndices(fis, &rows);CHKERRQ(ierr);
  ierr = ISGetIndices(cis, &cols);CHKERRQ(ierr);
  for (c = cStart; c < cEnd; c++) {
    const PetscInt *cone;
    PetscInt        coneSize, row, col, f;

    col  = cols[c-cStart];
    ierr = DMPlexGetCone(dm, c, &cone);CHKERRQ(ierr);
    ierr = DMPlexGetConeSize(dm, c, &coneSize);CHKERRQ(ierr);
    for (f = 0; f < coneSize; f++) {
      const PetscScalar v = 1.0;
      const PetscInt *children;
      PetscInt        numChildren, ch;

      row  = rows[cone[f]-fStart];
      ierr = MatSetValues(conn, 1, &row, 1, &col, &v, INSERT_VALUES);CHKERRQ(ierr);

      /* non-conforming meshes */
      ierr = DMPlexGetTreeChildren(dm, cone[f], &numChildren, &children);CHKERRQ(ierr);
      for (ch = 0; ch < numChildren; ch++) {
        const PetscInt child = children[ch];

        if (child < fStart || child >= fEnd) continue;
        row  = rows[child-fStart];
        ierr = MatSetValues(conn, 1, &row, 1, &col, &v, INSERT_VALUES);CHKERRQ(ierr);
      }
    }
  }
  ierr = ISRestoreIndices(fis, &rows);CHKERRQ(ierr);
  ierr = ISRestoreIndices(cis, &cols);CHKERRQ(ierr);
  ierr = ISDestroy(&fis);CHKERRQ(ierr);
  ierr = ISDestroy(&cis);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(conn, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(conn, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  /* Get parallel CSR by doing conn^T * conn */
  ierr = MatTransposeMatMult(conn, conn, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &CSR);CHKERRQ(ierr);
  ierr = MatDestroy(&conn);CHKERRQ(ierr);

  /* extract local part of the CSR */
  ierr = MatMPIAIJGetLocalMat(CSR, MAT_INITIAL_MATRIX, &conn);CHKERRQ(ierr);
  ierr = MatDestroy(&CSR);CHKERRQ(ierr);
  ierr = MatGetRowIJ(conn, 0, PETSC_FALSE, PETSC_FALSE, &m, &ii, &jj, &flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "No IJ format");

  /* get back requested output */
  if (numVertices) *numVertices = m;
  if (offsets) {
    /* Each row contains exactly one diagonal entry; subtracting i removes it from the counts */
    ierr = PetscCalloc1(m+1, &idxs);CHKERRQ(ierr);
    for (i = 1; i < m+1; i++) idxs[i] = ii[i] - i; /* ParMetis does not like self-connectivity */
    *offsets = idxs;
  }
  if (adjacency) {
    ierr = PetscMalloc1(ii[m] - m, &idxs);CHKERRQ(ierr);
    ierr = ISGetIndices(cis_own, &rows);CHKERRQ(ierr);
    for (i = 0, c = 0; i < m; i++) {
      PetscInt j, g = rows[i];

      for (j = ii[i]; j < ii[i+1]; j++) {
        if (jj[j] == g) continue; /* again, self-connectivity */
        idxs[c++] = jj[j];
      }
    }
    if (c != ii[m] - m) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected %D != %D",c,ii[m]-m);
    ierr = ISRestoreIndices(cis_own, &rows);CHKERRQ(ierr);
    *adjacency = idxs;
  }

  /* cleanup */
  ierr = ISDestroy(&cis_own);CHKERRQ(ierr);
  ierr = MatRestoreRowIJ(conn, 0, PETSC_FALSE, PETSC_FALSE, &m, &ii, &jj, &flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "No IJ format");
  ierr = MatDestroy(&conn);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
413 
414 /*@C
415   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
416 
417   Input Parameters:
+ dm      - The mesh DM
419 - height  - Height of the strata from which to construct the graph
420 
  Output Parameters:
422 + numVertices     - Number of vertices in the graph
423 . offsets         - Point offsets in the graph
424 . adjacency       - Point connectivity in the graph
425 - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.
426 
427   The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
428   representation on the mesh. If requested, globalNumbering needs to be destroyed by the caller; offsets and adjacency need to be freed with PetscFree().
429 
430   Level: developer
431 
432 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMSetAdjacency()
433 @*/
434 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
435 {
436   PetscErrorCode ierr;
437   PetscBool      usemat = PETSC_FALSE;
438 
439   PetscFunctionBegin;
440   ierr = PetscOptionsGetBool(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-dm_plex_csr_via_mat", &usemat, NULL);CHKERRQ(ierr);
441   if (usemat) {
442     ierr = DMPlexCreatePartitionerGraph_ViaMat(dm, height, numVertices, offsets, adjacency, globalNumbering);CHKERRQ(ierr);
443   } else {
444     ierr = DMPlexCreatePartitionerGraph_Native(dm, height, numVertices, offsets, adjacency, globalNumbering);CHKERRQ(ierr);
445   }
446   PetscFunctionReturn(0);
447 }
448 
449 /*@C
450   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
451 
452   Collective on DM
453 
454   Input Arguments:
455 + dm - The DMPlex
456 - cellHeight - The height of mesh points to treat as cells (default should be 0)
457 
458   Output Arguments:
459 + numVertices - The number of local vertices in the graph, or cells in the mesh.
460 . offsets     - The offset to the adjacency list for each cell
461 - adjacency   - The adjacency list for all cells
462 
463   Note: This is suitable for input to a mesh partitioner like ParMetis.
464 
465   Level: advanced
466 
467 .seealso: DMPlexCreate()
468 @*/
469 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
470 {
471   const PetscInt maxFaceCases = 30;
472   PetscInt       numFaceCases = 0;
473   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
474   PetscInt      *off, *adj;
475   PetscInt      *neighborCells = NULL;
476   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
477   PetscErrorCode ierr;
478 
479   PetscFunctionBegin;
480   /* For parallel partitioning, I think you have to communicate supports */
481   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
482   cellDim = dim - cellHeight;
483   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
484   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
485   if (cEnd - cStart == 0) {
486     if (numVertices) *numVertices = 0;
487     if (offsets)   *offsets   = NULL;
488     if (adjacency) *adjacency = NULL;
489     PetscFunctionReturn(0);
490   }
491   numCells  = cEnd - cStart;
492   faceDepth = depth - cellHeight;
493   if (dim == depth) {
494     PetscInt f, fStart, fEnd;
495 
496     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
497     /* Count neighboring cells */
498     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
499     for (f = fStart; f < fEnd; ++f) {
500       const PetscInt *support;
501       PetscInt        supportSize;
502       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
503       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
504       if (supportSize == 2) {
505         PetscInt numChildren;
506 
507         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
508         if (!numChildren) {
509           ++off[support[0]-cStart+1];
510           ++off[support[1]-cStart+1];
511         }
512       }
513     }
514     /* Prefix sum */
515     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
516     if (adjacency) {
517       PetscInt *tmp;
518 
519       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
520       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
521       ierr = PetscArraycpy(tmp, off, numCells+1);CHKERRQ(ierr);
522       /* Get neighboring cells */
523       for (f = fStart; f < fEnd; ++f) {
524         const PetscInt *support;
525         PetscInt        supportSize;
526         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
527         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
528         if (supportSize == 2) {
529           PetscInt numChildren;
530 
531           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
532           if (!numChildren) {
533             adj[tmp[support[0]-cStart]++] = support[1];
534             adj[tmp[support[1]-cStart]++] = support[0];
535           }
536         }
537       }
538 #if defined(PETSC_USE_DEBUG)
539       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
540 #endif
541       ierr = PetscFree(tmp);CHKERRQ(ierr);
542     }
543     if (numVertices) *numVertices = numCells;
544     if (offsets)   *offsets   = off;
545     if (adjacency) *adjacency = adj;
546     PetscFunctionReturn(0);
547   }
548   /* Setup face recognition */
549   if (faceDepth == 1) {
550     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
551 
552     for (c = cStart; c < cEnd; ++c) {
553       PetscInt corners;
554 
555       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
556       if (!cornersSeen[corners]) {
557         PetscInt nFV;
558 
559         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
560         cornersSeen[corners] = 1;
561 
562         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
563 
564         numFaceVertices[numFaceCases++] = nFV;
565       }
566     }
567   }
568   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
569   /* Count neighboring cells */
570   for (cell = cStart; cell < cEnd; ++cell) {
571     PetscInt numNeighbors = PETSC_DETERMINE, n;
572 
573     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
574     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
575     for (n = 0; n < numNeighbors; ++n) {
576       PetscInt        cellPair[2];
577       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
578       PetscInt        meetSize = 0;
579       const PetscInt *meet    = NULL;
580 
581       cellPair[0] = cell; cellPair[1] = neighborCells[n];
582       if (cellPair[0] == cellPair[1]) continue;
583       if (!found) {
584         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
585         if (meetSize) {
586           PetscInt f;
587 
588           for (f = 0; f < numFaceCases; ++f) {
589             if (numFaceVertices[f] == meetSize) {
590               found = PETSC_TRUE;
591               break;
592             }
593           }
594         }
595         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
596       }
597       if (found) ++off[cell-cStart+1];
598     }
599   }
600   /* Prefix sum */
601   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
602 
603   if (adjacency) {
604     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
605     /* Get neighboring cells */
606     for (cell = cStart; cell < cEnd; ++cell) {
607       PetscInt numNeighbors = PETSC_DETERMINE, n;
608       PetscInt cellOffset   = 0;
609 
610       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
611       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
612       for (n = 0; n < numNeighbors; ++n) {
613         PetscInt        cellPair[2];
614         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
615         PetscInt        meetSize = 0;
616         const PetscInt *meet    = NULL;
617 
618         cellPair[0] = cell; cellPair[1] = neighborCells[n];
619         if (cellPair[0] == cellPair[1]) continue;
620         if (!found) {
621           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
622           if (meetSize) {
623             PetscInt f;
624 
625             for (f = 0; f < numFaceCases; ++f) {
626               if (numFaceVertices[f] == meetSize) {
627                 found = PETSC_TRUE;
628                 break;
629               }
630             }
631           }
632           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
633         }
634         if (found) {
635           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
636           ++cellOffset;
637         }
638       }
639     }
640   }
641   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
642   if (numVertices) *numVertices = numCells;
643   if (offsets)   *offsets   = off;
644   if (adjacency) *adjacency = adj;
645   PetscFunctionReturn(0);
646 }
647 
648 /*@C
649   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
650 
651   Not Collective
652 
653   Input Parameters:
654 + name        - The name of a new user-defined creation routine
655 - create_func - The creation routine itself
656 
657   Notes:
658   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
659 
660   Sample usage:
661 .vb
662     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
663 .ve
664 
665   Then, your PetscPartitioner type can be chosen with the procedural interface via
666 .vb
667     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
668     PetscPartitionerSetType(PetscPartitioner, "my_part");
669 .ve
670    or at runtime via the option
671 .vb
672     -petscpartitioner_type my_part
673 .ve
674 
675   Level: advanced
676 
677 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
678 
679 @*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Add the constructor to the global list consulted by PetscPartitionerSetType() */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
688 
689 /*@C
690   PetscPartitionerSetType - Builds a particular PetscPartitioner
691 
692   Collective on PetscPartitioner
693 
694   Input Parameters:
695 + part - The PetscPartitioner object
696 - name - The kind of partitioner
697 
698   Options Database Key:
699 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
700 
701   Level: intermediate
702 
703 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
704 @*/
PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
{
  PetscErrorCode (*r)(PetscPartitioner);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* No-op if the partitioner already has the requested type */
  ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  /* Look up the constructor registered under this name */
  ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);

  /* Tear down the previous implementation before installing the new one */
  if (part->ops->destroy) {
    ierr = (*part->ops->destroy)(part);CHKERRQ(ierr);
  }
  part->noGraph = PETSC_FALSE;
  ierr = PetscMemzero(part->ops, sizeof(*part->ops));CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
  /* Invoke the type-specific constructor, which fills part->ops */
  ierr = (*r)(part);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
728 
/*@C
  PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.

  Not Collective

  Input Parameter:
. part - The PetscPartitioner

  Output Parameter:
. name - The PetscPartitioner type name

  Level: intermediate

.seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  /* Returns the string owned by the object; the caller must not free it */
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
752 
/*@C
   PetscPartitionerViewFromOptions - View from Options

   Collective on PetscPartitioner

   Input Parameters:
+  A - the PetscPartitioner object
.  obj - Optional object whose options prefix is used to query the database (may be NULL)
-  name - command line option

   Level: intermediate

.seealso:  PetscPartitionerView(), PetscObjectViewFromOptions()
@*/
PetscErrorCode PetscPartitionerViewFromOptions(PetscPartitioner A,PetscObject obj,const char name[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,PETSCPARTITIONER_CLASSID,1);
  /* Delegate to the generic PetscObject machinery, which parses the viewer
     specification given for the option and calls PetscPartitionerView() */
  ierr = PetscObjectViewFromOptions((PetscObject)A,obj,name);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
775 
/*@C
  PetscPartitionerView - Views a PetscPartitioner

  Collective on PetscPartitioner

  Input Parameters:
+ part - the PetscPartitioner object to view
- v    - the viewer (if NULL, the default ASCII viewer on the object's communicator is used)

  Level: developer

.seealso: PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
{
  PetscMPIInt    size;
  PetscBool      isascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Fall back to the standard ASCII viewer when none is supplied */
  if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
  ierr = PetscObjectTypeCompare((PetscObject) v, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
  if (isascii) {
    /* Print the state common to all implementations */
    ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v, "Graph Partitioner: %d MPI Process%s\n", size, size > 1 ? "es" : "");CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v, "  type: %s\n", part->hdr.type_name);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v, "  edge cut: %D\n", part->edgeCut);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v, "  balance: %.2g\n", part->balance);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v, "  use vertex weights: %d\n", part->usevwgt);CHKERRQ(ierr);
  }
  /* Let the implementation print its type-specific information */
  if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
810 
811 static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType)
812 {
813   PetscFunctionBegin;
814   if (!currentType) {
815 #if defined(PETSC_HAVE_PARMETIS)
816     *defaultType = PETSCPARTITIONERPARMETIS;
817 #elif defined(PETSC_HAVE_PTSCOTCH)
818     *defaultType = PETSCPARTITIONERPTSCOTCH;
819 #elif defined(PETSC_HAVE_CHACO)
820     *defaultType = PETSCPARTITIONERCHACO;
821 #else
822     *defaultType = PETSCPARTITIONERSIMPLE;
823 #endif
824   } else {
825     *defaultType = currentType;
826   }
827   PetscFunctionReturn(0);
828 }
829 
/*@
  PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to set options for

  Options Database Keys:
+  -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
.  -petscpartitioner_use_vertex_weights - Uses weights associated with the graph vertices
-  -petscpartitioner_view_graph - View the graph each time PetscPartitionerPartition is called. Viewer can be customized, see PetscOptionsGetViewer()

  Level: developer

.seealso: PetscPartitionerView(), PetscPartitionerSetType(), PetscPartitionerPartition()
@*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  const char    *defaultType;
  char           name[256];
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr = PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);CHKERRQ(ierr);
  /* PetscObjectOptionsBegin() declares the PetscOptionsObject used below */
  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, sizeof(name), &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
  } else if (!((PetscObject) part)->type_name) {
    /* Nothing requested and no type set yet: install the default */
    ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
  }
  ierr = PetscOptionsBool("-petscpartitioner_use_vertex_weights","Use vertex weights","",part->usevwgt,&part->usevwgt,NULL);CHKERRQ(ierr);
  /* Give the implementation a chance to handle its own options */
  if (part->ops->setfromoptions) {
    ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);
  }
  /* Release viewers from any previous call before re-querying the database */
  ierr = PetscViewerDestroy(&part->viewer);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&part->viewerGraph);CHKERRQ(ierr);
  ierr = PetscOptionsGetViewer(((PetscObject) part)->comm, ((PetscObject) part)->options, ((PetscObject) part)->prefix, "-petscpartitioner_view", &part->viewer, NULL, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsGetViewer(((PetscObject) part)->comm, ((PetscObject) part)->options, ((PetscObject) part)->prefix, "-petscpartitioner_view_graph", &part->viewerGraph, NULL, &part->viewGraph);CHKERRQ(ierr);
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
877 
878 /*@C
879   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
880 
881   Collective on PetscPartitioner
882 
883   Input Parameter:
884 . part - the PetscPartitioner object to setup
885 
886   Level: developer
887 
888 .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
889 @*/
890 PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
891 {
892   PetscErrorCode ierr;
893 
894   PetscFunctionBegin;
895   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
896   if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
897   PetscFunctionReturn(0);
898 }
899 
900 /*@
901   PetscPartitionerDestroy - Destroys a PetscPartitioner object
902 
903   Collective on PetscPartitioner
904 
905   Input Parameter:
906 . part - the PetscPartitioner object to destroy
907 
908   Level: developer
909 
910 .seealso: PetscPartitionerView()
911 @*/
912 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
913 {
914   PetscErrorCode ierr;
915 
916   PetscFunctionBegin;
917   if (!*part) PetscFunctionReturn(0);
918   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
919 
920   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
921   ((PetscObject) (*part))->refct = 0;
922 
923   ierr = PetscViewerDestroy(&(*part)->viewer);CHKERRQ(ierr);
924   ierr = PetscViewerDestroy(&(*part)->viewerGraph);CHKERRQ(ierr);
925   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
926   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
927   PetscFunctionReturn(0);
928 }
929 
/*@
  PetscPartitionerPartition - Partition a graph

  Collective on PetscPartitioner

  Input Parameters:
+ part    - The PetscPartitioner
. nparts  - Number of partitions
. numVertices - Number of vertices in the local part of the graph
. start - row pointers for the local part of the graph (CSR style)
. adjacency - adjacency list (CSR style)
. vertexSection - PetscSection describing the absolute weight of each local vertex (can be NULL)
- targetSection - PetscSection describing the absolute weight of each partition (can be NULL)

  Output Parameters:
+ partSection     - The PetscSection giving the division of points by partition
- partition       - The list of points by partition

  Options Database Keys:
+ -petscpartitioner_view - View the partitioner information
- -petscpartitioner_view_graph - View the graph we are partitioning

  Notes:
    The chart of the vertexSection (if present) must contain [0,numVertices), with the number of dofs in the section specifying the absolute weight for each vertex.
    The chart of the targetSection (if present) must contain [0,nparts), with the number of dofs in the section specifying the absolute weight for each partition. This information must be the same across processes, PETSc does not check it.

  Level: developer

.seealso: PetscPartitionerCreate(), PetscSectionCreate(), PetscSectionSetChart(), PetscSectionSetDof()
@*/
PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertexSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidLogicalCollectiveInt(part, nparts, 2);
  if (nparts <= 0) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Number of parts must be positive");
  if (numVertices < 0) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of vertices must be non-negative");
  /* Implementations flagged noGraph never read the CSR arrays, so they may be NULL */
  if (numVertices && !part->noGraph) {
    PetscValidIntPointer(start, 4);
    PetscValidIntPointer(adjacency, 5);
  }
  if (vertexSection) {
    PetscInt s,e;

    PetscValidHeaderSpecific(vertexSection, PETSC_SECTION_CLASSID, 6);
    ierr = PetscSectionGetChart(vertexSection, &s, &e);CHKERRQ(ierr);
    if (s > 0 || e < numVertices) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid vertexSection chart [%D,%D)",s,e);
  }
  if (targetSection) {
    PetscInt s,e;

    PetscValidHeaderSpecific(targetSection, PETSC_SECTION_CLASSID, 7);
    ierr = PetscSectionGetChart(targetSection, &s, &e);CHKERRQ(ierr);
    if (s > 0 || e < nparts) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid targetSection chart [%D,%D)",s,e);
  }
  PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 8);
  PetscValidPointer(partition, 9);

  ierr = PetscSectionReset(partSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  if (nparts == 1) { /* quick */
    /* Single-partition case: everything belongs to partition 0, no method needed */
    ierr = PetscSectionSetDof(partSection, 0, numVertices);CHKERRQ(ierr);
    ierr = ISCreateStride(PetscObjectComm((PetscObject)part),numVertices,0,1,partition);CHKERRQ(ierr);
  } else {
    if (!part->ops->partition) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "PetscPartitioner %s has no partitioning method", ((PetscObject)part)->type_name);
    ierr = (*part->ops->partition)(part, nparts, numVertices, start, adjacency, vertexSection, targetSection, partSection, partition);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* Optionally dump the CSR graph, one rank at a time, via the synchronized ASCII viewer */
  if (part->viewerGraph) {
    PetscViewer viewer = part->viewerGraph;
    PetscBool   isascii;
    PetscInt    v, i;
    PetscMPIInt rank;

    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) viewer), &rank);CHKERRQ(ierr);
    ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
    if (isascii) {
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]Nv: %D\n", rank, numVertices);CHKERRQ(ierr);
      for (v = 0; v < numVertices; ++v) {
        const PetscInt s = start[v];
        const PetscInt e = start[v+1];

        ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]  ", rank);CHKERRQ(ierr);
        for (i = s; i < e; ++i) {ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%D ", adjacency[i]);CHKERRQ(ierr);}
        ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%D-%D)\n", s, e);CHKERRQ(ierr);
      }
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    }
  }
  /* Optionally report the partitioner state itself */
  if (part->viewer) {
    ierr = PetscPartitionerView(part,part->viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1028 
1029 /*@
1030   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
1031 
1032   Collective
1033 
1034   Input Parameter:
1035 . comm - The communicator for the PetscPartitioner object
1036 
1037   Output Parameter:
1038 . part - The PetscPartitioner object
1039 
1040   Level: beginner
1041 
1042 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
1043 @*/
1044 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
1045 {
1046   PetscPartitioner p;
1047   const char       *partitionerType = NULL;
1048   PetscErrorCode   ierr;
1049 
1050   PetscFunctionBegin;
1051   PetscValidPointer(part, 2);
1052   *part = NULL;
1053   ierr = DMInitializePackage();CHKERRQ(ierr);
1054 
1055   ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
1056   ierr = PetscPartitionerGetDefaultType(NULL,&partitionerType);CHKERRQ(ierr);
1057   ierr = PetscPartitionerSetType(p,partitionerType);CHKERRQ(ierr);
1058 
1059   p->edgeCut = 0;
1060   p->balance = 0.0;
1061   p->usevwgt = PETSC_TRUE;
1062 
1063   *part = p;
1064   PetscFunctionReturn(0);
1065 }
1066 
/*@
  PetscPartitionerDMPlexPartition - Create a non-overlapping partition of the cells in the mesh

  Collective on PetscPartitioner

  Input Parameters:
+ part    - The PetscPartitioner
. dm      - The mesh DM
- targetSection - The PetscSection describing the absolute weight of each partition (can be NULL)

  Output Parameters:
+ partSection     - The PetscSection giving the division of points by partition
- partition       - The list of points by partition

  Notes:
    If the DM has a local section associated, each point to be partitioned will be weighted by the total number of dofs identified
    by the section in the transitive closure of the point.

  Level: developer

.seealso: DMPlexDistribute(), PetscPartitionerCreate(), PetscSectionCreate(), PetscSectionSetChart(), PetscPartitionerPartition()
@*/
PetscErrorCode PetscPartitionerDMPlexPartition(PetscPartitioner part, DM dm, PetscSection targetSection, PetscSection partSection, IS *partition)
{
  PetscMPIInt    size;
  PetscBool      isplex;
  PetscErrorCode ierr;
  PetscSection   vertSection = NULL;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
  if (targetSection) PetscValidHeaderSpecific(targetSection, PETSC_SECTION_CLASSID, 3);
  PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
  PetscValidPointer(partition, 5);
  ierr = PetscObjectTypeCompare((PetscObject)dm,DMPLEX,&isplex);CHKERRQ(ierr);
  if (!isplex) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"Not for type %s",((PetscObject)dm)->type_name);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial shortcut: all points of the stratum go to partition 0 */
    PetscInt *points;
    PetscInt  cStart, cEnd, c;

    ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
    ierr = PetscSectionReset(partSection);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
    ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
    for (c = cStart; c < cEnd; ++c) points[c] = c;
    ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  if (part->height == 0) {
    PetscInt numVertices = 0;
    PetscInt *start     = NULL;
    PetscInt *adjacency = NULL;
    IS       globalNumbering;

    /* Build the dual (cell-adjacency) graph only when the partitioner will read it */
    if (!part->noGraph || part->viewGraph) {
      ierr = DMPlexCreatePartitionerGraph(dm, part->height, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
    } else { /* only compute the number of owned local vertices */
      const PetscInt *idxs;
      PetscInt       p, pStart, pEnd;

      ierr = DMPlexGetHeightStratum(dm, part->height, &pStart, &pEnd);CHKERRQ(ierr);
      ierr = DMPlexCreateNumbering_Plex(dm, pStart, pEnd, 0, NULL, dm->sf, &globalNumbering);CHKERRQ(ierr);
      ierr = ISGetIndices(globalNumbering, &idxs);CHKERRQ(ierr);
      /* negative global indices mark ghost (non-owned) points */
      for (p = 0; p < pEnd - pStart; p++) numVertices += idxs[p] < 0 ? 0 : 1;
      ierr = ISRestoreIndices(globalNumbering, &idxs);CHKERRQ(ierr);
    }
    if (part->usevwgt) {
      PetscSection   section = dm->localSection, clSection = NULL;
      IS             clPoints = NULL;
      const PetscInt *gid,*clIdx;
      PetscInt       v, p, pStart, pEnd;

      /* dm->localSection encodes degrees of freedom per point, not per cell. We need to get the closure index to properly specify cell weights (aka dofs) */
      /* We do this only if the local section has been set */
      if (section) {
        ierr = PetscSectionGetClosureIndex(section, (PetscObject)dm, &clSection, NULL);CHKERRQ(ierr);
        if (!clSection) {
          ierr = DMPlexCreateClosureIndex(dm,NULL);CHKERRQ(ierr);
        }
        ierr = PetscSectionGetClosureIndex(section, (PetscObject)dm, &clSection, &clPoints);CHKERRQ(ierr);
        ierr = ISGetIndices(clPoints,&clIdx);CHKERRQ(ierr);
      }
      ierr = DMPlexGetHeightStratum(dm, part->height, &pStart, &pEnd);CHKERRQ(ierr);
      ierr = PetscSectionCreate(PETSC_COMM_SELF, &vertSection);CHKERRQ(ierr);
      ierr = PetscSectionSetChart(vertSection, 0, numVertices);CHKERRQ(ierr);
      if (globalNumbering) {
        ierr = ISGetIndices(globalNumbering,&gid);CHKERRQ(ierr);
      } else gid = NULL;
      for (p = pStart, v = 0; p < pEnd; ++p) {
        PetscInt dof = 1;

        /* skip cells in the overlap */
        if (gid && gid[p-pStart] < 0) continue;

        if (section) {
          PetscInt cl, clSize, clOff;

          /* weight = total dofs over the transitive closure of the point */
          dof  = 0;
          ierr = PetscSectionGetDof(clSection, p, &clSize);CHKERRQ(ierr);
          ierr = PetscSectionGetOffset(clSection, p, &clOff);CHKERRQ(ierr);
          for (cl = 0; cl < clSize; cl+=2) {
            PetscInt clDof, clPoint = clIdx[clOff + cl]; /* odd indices are reserved for orientations */

            ierr = PetscSectionGetDof(section, clPoint, &clDof);CHKERRQ(ierr);
            dof += clDof;
          }
        }
        if (!dof) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of dofs for point %D in the local section should be positive",p);
        ierr = PetscSectionSetDof(vertSection, v, dof);CHKERRQ(ierr);
        v++;
      }
      if (globalNumbering) {
        ierr = ISRestoreIndices(globalNumbering,&gid);CHKERRQ(ierr);
      }
      if (clPoints) {
        ierr = ISRestoreIndices(clPoints,&clIdx);CHKERRQ(ierr);
      }
      ierr = PetscSectionSetUp(vertSection);CHKERRQ(ierr);
    }
    ierr = PetscPartitionerPartition(part, size, numVertices, start, adjacency, vertSection, targetSection, partSection, partition);CHKERRQ(ierr);
    ierr = PetscFree(start);CHKERRQ(ierr);
    ierr = PetscFree(adjacency);CHKERRQ(ierr);
    if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
      const PetscInt *globalNum;
      const PetscInt *partIdx;
      PetscInt       *map, cStart, cEnd;
      PetscInt       *adjusted, i, localSize, offset;
      IS             newPartition;

      ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
      ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
      ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
      ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
      ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
      ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
      /* map[k] = local point of the k-th locally-owned vertex */
      for (i = cStart, offset = 0; i < cEnd; i++) {
        if (globalNum[i - cStart] >= 0) map[offset++] = i;
      }
      for (i = 0; i < localSize; i++) {
        adjusted[i] = map[partIdx[i]];
      }
      ierr = PetscFree(map);CHKERRQ(ierr);
      ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
      ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
      ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
      ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
      ierr = ISDestroy(partition);CHKERRQ(ierr);
      *partition = newPartition;
    }
  } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
  ierr = PetscSectionDestroy(&vertSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1224 
1225 static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
1226 {
1227   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1228   PetscErrorCode          ierr;
1229 
1230   PetscFunctionBegin;
1231   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
1232   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
1233   ierr = PetscFree(p);CHKERRQ(ierr);
1234   PetscFunctionReturn(0);
1235 }
1236 
1237 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
1238 {
1239   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1240   PetscErrorCode          ierr;
1241 
1242   PetscFunctionBegin;
1243   if (p->random) {
1244     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1245     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
1246     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1247   }
1248   PetscFunctionReturn(0);
1249 }
1250 
1251 static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
1252 {
1253   PetscBool      iascii;
1254   PetscErrorCode ierr;
1255 
1256   PetscFunctionBegin;
1257   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1258   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1259   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1260   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
1261   PetscFunctionReturn(0);
1262 }
1263 
1264 static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1265 {
1266   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1267   PetscErrorCode          ierr;
1268 
1269   PetscFunctionBegin;
1270   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner Shell Options");CHKERRQ(ierr);
1271   ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
1272   ierr = PetscOptionsTail();CHKERRQ(ierr);
1273   PetscFunctionReturn(0);
1274 }
1275 
1276 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
1277 {
1278   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1279   PetscInt                np;
1280   PetscErrorCode          ierr;
1281 
1282   PetscFunctionBegin;
1283   if (p->random) {
1284     PetscRandom r;
1285     PetscInt   *sizes, *points, v, p;
1286     PetscMPIInt rank;
1287 
1288     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) part), &rank);CHKERRQ(ierr);
1289     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
1290     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr);
1291     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
1292     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
1293     for (v = 0; v < numVertices; ++v) {points[v] = v;}
1294     for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);}
1295     for (v = numVertices-1; v > 0; --v) {
1296       PetscReal val;
1297       PetscInt  w, tmp;
1298 
1299       ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));CHKERRQ(ierr);
1300       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
1301       w    = PetscFloorReal(val);
1302       tmp       = points[v];
1303       points[v] = points[w];
1304       points[w] = tmp;
1305     }
1306     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
1307     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
1308     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
1309   }
1310   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
1311   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
1312   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
1313   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
1314   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
1315   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
1316   *partition = p->partition;
1317   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
1318   PetscFunctionReturn(0);
1319 }
1320 
/* Install the Shell implementation's virtual function table on the base object */
static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->noGraph             = PETSC_TRUE; /* PetscPartitionerShell cannot overload the partition call, so it is safe for now */
  part->ops->view           = PetscPartitionerView_Shell;
  part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
  part->ops->destroy        = PetscPartitionerDestroy_Shell;
  part->ops->partition      = PetscPartitionerPartition_Shell;
  PetscFunctionReturn(0);
}
1331 
1332 /*MC
1333   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
1334 
1335   Level: intermediate
1336 
1337   Options Database Keys:
1338 .  -petscpartitioner_shell_random - Use a random partition
1339 
1340 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1341 M*/
1342 
1343 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
1344 {
1345   PetscPartitioner_Shell *p;
1346   PetscErrorCode          ierr;
1347 
1348   PetscFunctionBegin;
1349   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1350   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1351   part->data = p;
1352 
1353   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
1354   p->random = PETSC_FALSE;
1355   PetscFunctionReturn(0);
1356 }
1357 
/*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh

  Collective on PetscPartitioner

  Input Parameters:
+ part   - The PetscPartitioner
. size   - The number of partitions
. sizes  - array of length size (or NULL) providing the number of points in each partition
- points - array of length sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)

  Level: developer

  Notes:
    It is safe to free the sizes and points arrays after use in this routine; their contents are copied.

.seealso: DMPlexDistribute(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscInt                proc, numPoints;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  if (sizes)  {PetscValidPointer(sizes, 3);}
  if (points) {PetscValidPointer(points, 4);}
  /* Replace any previously stored partition */
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
  if (sizes) {
    for (proc = 0; proc < size; ++proc) {
      ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
    }
  }
  ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
  /* Storage size of the section is sum(sizes): the length of the point permutation */
  ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1400 
1401 /*@
1402   PetscPartitionerShellSetRandom - Set the flag to use a random partition
1403 
1404   Collective on PetscPartitioner
1405 
1406   Input Parameters:
1407 + part   - The PetscPartitioner
1408 - random - The flag to use a random partition
1409 
1410   Level: intermediate
1411 
1412 .seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
1413 @*/
1414 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
1415 {
1416   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1417 
1418   PetscFunctionBegin;
1419   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1420   p->random = random;
1421   PetscFunctionReturn(0);
1422 }
1423 
1424 /*@
1425   PetscPartitionerShellGetRandom - get the flag to use a random partition
1426 
1427   Collective on PetscPartitioner
1428 
1429   Input Parameter:
1430 . part   - The PetscPartitioner
1431 
  Output Parameter:
1433 . random - The flag to use a random partition
1434 
1435   Level: intermediate
1436 
.seealso: PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
1438 @*/
1439 PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
1440 {
1441   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1442 
1443   PetscFunctionBegin;
1444   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1445   PetscValidPointer(random, 2);
1446   *random = p->random;
1447   PetscFunctionReturn(0);
1448 }
1449 
1450 static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
1451 {
1452   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
1453   PetscErrorCode          ierr;
1454 
1455   PetscFunctionBegin;
1456   ierr = PetscFree(p);CHKERRQ(ierr);
1457   PetscFunctionReturn(0);
1458 }
1459 
/* ASCII viewer for the simple partitioner: there is no type-specific state to report */
static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1465 
1466 static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
1467 {
1468   PetscBool      iascii;
1469   PetscErrorCode ierr;
1470 
1471   PetscFunctionBegin;
1472   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1473   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1474   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1475   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
1476   PetscFunctionReturn(0);
1477 }
1478 
/* PetscPartitionerPartition_Simple - Assign contiguous runs of vertices to each partition.

   The local point permutation is always the identity (a stride IS); only the per-partition
   counts in partSection are computed.  If targetSection is given, its per-partition weights
   are scaled into integer vertex counts; otherwise vertices are split as evenly as possible.
   Vertex weights (vertSection) and the adjacency graph (start/adjacency) are ignored. */
static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np, *tpwgts = NULL, sumw = 0, numVerticesGlobal  = 0;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (vertSection) { ierr = PetscInfo(part,"PETSCPARTITIONERSIMPLE ignores vertex weights\n");CHKERRQ(ierr); }
  comm = PetscObjectComm((PetscObject)part);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (targetSection) {
    /* Scale the target weights into integer vertex counts summing to the global vertex count */
    ierr = MPIU_Allreduce(&numVertices, &numVerticesGlobal, 1, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
    ierr = PetscCalloc1(nparts,&tpwgts);CHKERRQ(ierr);
    for (np = 0; np < nparts; ++np) {
      ierr = PetscSectionGetDof(targetSection,np,&tpwgts[np]);CHKERRQ(ierr);
      sumw += tpwgts[np];
    }
    if (!sumw) {
      /* All target weights are zero: discard them and fall back to the uniform split below */
      ierr = PetscFree(tpwgts);CHKERRQ(ierr);
    } else {
      PetscInt m,mp;
      for (np = 0; np < nparts; ++np) tpwgts[np] = (tpwgts[np]*numVerticesGlobal)/sumw;
      /* Integer division can lose vertices; give the remainder to the largest partition */
      for (np = 0, m = -1, mp = 0, sumw = 0; np < nparts; ++np) {
        if (m < tpwgts[np]) { m = tpwgts[np]; mp = np; }
        sumw += tpwgts[np];
      }
      if (sumw != numVerticesGlobal) tpwgts[mp] += numVerticesGlobal - sumw;
    }
  }

  /* Local points keep their order: the partition IS is the identity permutation */
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    if (tpwgts) {
      for (np = 0; np < nparts; ++np) {
        ierr = PetscSectionSetDof(partSection, np, tpwgts[np]);CHKERRQ(ierr);
      }
    } else {
      /* Even split: the first (numVertices % nparts) partitions get one extra vertex */
      for (np = 0; np < nparts; ++np) {
        ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);
      }
    }
  } else {
    if (tpwgts) {
      /* Parallel weighted case: fill a globally numbered Vec with each vertex's partition
         number, then count how many locally owned vertices land in each partition */
      Vec         v;
      PetscScalar *array;
      PetscInt    st,j;
      PetscMPIInt rank;

      ierr = VecCreate(comm,&v);CHKERRQ(ierr);
      ierr = VecSetSizes(v,numVertices,numVerticesGlobal);CHKERRQ(ierr);
      ierr = VecSetType(v,VECSTANDARD);CHKERRQ(ierr);
      ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
      for (np = 0,st = 0; np < nparts; ++np) {
        /* Rank np sets the entries of partition np; when nparts > size, the last rank covers the tail */
        if (rank == np || (rank == size-1 && size < nparts && np >= size)) {
          for (j = 0; j < tpwgts[np]; j++) {
            ierr = VecSetValue(v,st+j,np,INSERT_VALUES);CHKERRQ(ierr);
          }
        }
        st += tpwgts[np];
      }
      ierr = VecAssemblyBegin(v);CHKERRQ(ierr);
      ierr = VecAssemblyEnd(v);CHKERRQ(ierr);
      ierr = VecGetArray(v,&array);CHKERRQ(ierr);
      for (j = 0; j < numVertices; ++j) {
        ierr = PetscSectionAddDof(partSection,PetscRealPart(array[j]),1);CHKERRQ(ierr);
      }
      ierr = VecRestoreArray(v,&array);CHKERRQ(ierr);
      ierr = VecDestroy(&v);CHKERRQ(ierr);
    } else {
      /* Parallel uniform case: compute analytically which partitions overlap this rank's
         contiguous global vertex range [myFirst, myLast] */
      PetscMPIInt rank;
      PetscInt nvGlobal, *offsets, myFirst, myLast;

      ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
      offsets[0] = 0;
      ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
      for (np = 2; np <= size; np++) {
        offsets[np] += offsets[np-1];
      }
      nvGlobal = offsets[size];
      ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
      myFirst = offsets[rank];
      myLast  = offsets[rank + 1] - 1;
      ierr = PetscFree(offsets);CHKERRQ(ierr);
      if (numVertices) {
        PetscInt firstPart = 0, firstLargePart = 0;
        PetscInt lastPart = 0, lastLargePart = 0;
        /* The first rem partitions hold pBig vertices each, the remaining ones pSmall */
        PetscInt rem = nvGlobal % nparts;
        PetscInt pSmall = nvGlobal/nparts;
        PetscInt pBig = nvGlobal/nparts + 1;

        if (rem) {
          firstLargePart = myFirst / pBig;
          lastLargePart  = myLast  / pBig;

          if (firstLargePart < rem) {
            firstPart = firstLargePart;
          } else {
            firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
          }
          if (lastLargePart < rem) {
            lastPart = lastLargePart;
          } else {
            lastPart = rem + (myLast - (rem * pBig)) / pSmall;
          }
        } else {
          firstPart = myFirst / (nvGlobal/nparts);
          lastPart  = myLast  / (nvGlobal/nparts);
        }

        for (np = firstPart; np <= lastPart; np++) {
          /* Clip each partition's global range to this rank's ownership range */
          PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
          PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

          PartStart = PetscMax(PartStart,myFirst);
          PartEnd   = PetscMin(PartEnd,myLast+1);
          ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
        }
      }
    }
  }
  ierr = PetscFree(tpwgts);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1603 
1604 static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1605 {
1606   PetscFunctionBegin;
1607   part->noGraph        = PETSC_TRUE;
1608   part->ops->view      = PetscPartitionerView_Simple;
1609   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1610   part->ops->partition = PetscPartitionerPartition_Simple;
1611   PetscFunctionReturn(0);
1612 }
1613 
1614 /*MC
1615   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1616 
1617   Level: intermediate
1618 
1619 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1620 M*/
1621 
1622 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1623 {
1624   PetscPartitioner_Simple *p;
1625   PetscErrorCode           ierr;
1626 
1627   PetscFunctionBegin;
1628   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1629   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1630   part->data = p;
1631 
1632   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1633   PetscFunctionReturn(0);
1634 }
1635 
1636 static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1637 {
1638   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1639   PetscErrorCode          ierr;
1640 
1641   PetscFunctionBegin;
1642   ierr = PetscFree(p);CHKERRQ(ierr);
1643   PetscFunctionReturn(0);
1644 }
1645 
/* ASCII viewer for the gather partitioner: there is no type-specific state to report */
static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1651 
1652 static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1653 {
1654   PetscBool      iascii;
1655   PetscErrorCode ierr;
1656 
1657   PetscFunctionBegin;
1658   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1659   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1660   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1661   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1662   PetscFunctionReturn(0);
1663 }
1664 
1665 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
1666 {
1667   PetscInt       np;
1668   PetscErrorCode ierr;
1669 
1670   PetscFunctionBegin;
1671   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1672   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1673   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1674   PetscFunctionReturn(0);
1675 }
1676 
1677 static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1678 {
1679   PetscFunctionBegin;
1680   part->noGraph        = PETSC_TRUE;
1681   part->ops->view      = PetscPartitionerView_Gather;
1682   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1683   part->ops->partition = PetscPartitionerPartition_Gather;
1684   PetscFunctionReturn(0);
1685 }
1686 
1687 /*MC
1688   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1689 
1690   Level: intermediate
1691 
1692 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1693 M*/
1694 
1695 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1696 {
1697   PetscPartitioner_Gather *p;
1698   PetscErrorCode           ierr;
1699 
1700   PetscFunctionBegin;
1701   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1702   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1703   part->data = p;
1704 
1705   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1706   PetscFunctionReturn(0);
1707 }
1708 
1709 
1710 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1711 {
1712   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1713   PetscErrorCode          ierr;
1714 
1715   PetscFunctionBegin;
1716   ierr = PetscFree(p);CHKERRQ(ierr);
1717   PetscFunctionReturn(0);
1718 }
1719 
/* ASCII viewer for the Chaco partitioner: there is no type-specific state to report */
static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1725 
1726 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1727 {
1728   PetscBool      iascii;
1729   PetscErrorCode ierr;
1730 
1731   PetscFunctionBegin;
1732   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1733   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1734   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1735   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1736   PetscFunctionReturn(0);
1737 }
1738 
1739 #if defined(PETSC_HAVE_CHACO)
1740 #if defined(PETSC_HAVE_UNISTD_H)
1741 #include <unistd.h>
1742 #endif
1743 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1744 #include <chaco.h>
1745 #else
1746 /* Older versions of Chaco do not have an include file */
1747 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1748                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1749                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1750                        int mesh_dims[3], double *goal, int global_method, int local_method,
1751                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1752 #endif
1753 extern int FREE_GRAPH;
1754 #endif
1755 
/* PetscPartitionerPartition_Chaco - Partition a serially held graph with the Chaco library.

   The whole graph must live on a single process (checked in debug builds); processes
   holding no vertices return an empty partition IS immediately.  Chaco's stdout chatter
   is captured through a pipe and only surfaced if the library reports an error. */
static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int           *assignment;              /* Output partition */
#else
  short int     *assignment;              /* Output partition */
#endif
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)part,&comm);CHKERRQ(ierr);
#if defined (PETSC_USE_DEBUG)
  {
    /* Verify that at most one rank holds vertices: Chaco is a serial partitioner */
    int ival,isum;
    PetscBool distributed;

    ival = (numVertices > 0);
    ierr = MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);CHKERRQ(ierr);
    distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
    if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
  }
#endif
  if (!numVertices) { /* distributed case, return if not holding the graph */
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco uses 1-based vertex numbering: shift the adjacency in place (undone before returning) */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  /* A 1-D mesh of nparts processors */
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  if (part->usevwgt) { ierr = PetscInfo(part,"PETSCPARTITIONERCHACO ignores vertex weights\n");CHKERRQ(ierr); }
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    /* Restore stdout and capture whatever Chaco printed, to report on failure */
    char msgLog[10000];
    int  count;

    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    /* NOTE(review): this error exit leaves 'assignment' allocated and 'adjacency' shifted
       by one -- confirm whether cleanup on this path matters */
    if (ierr) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#else
  if (ierr) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
#endif
  /* Convert to PetscSection+IS */
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  /* Group the vertices of each partition together, preserving order within a partition */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Restore the caller's 0-based adjacency */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1873 
1874 static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1875 {
1876   PetscFunctionBegin;
1877   part->noGraph        = PETSC_FALSE;
1878   part->ops->view      = PetscPartitionerView_Chaco;
1879   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1880   part->ops->partition = PetscPartitionerPartition_Chaco;
1881   PetscFunctionReturn(0);
1882 }
1883 
1884 /*MC
1885   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1886 
1887   Level: intermediate
1888 
1889 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1890 M*/
1891 
1892 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1893 {
1894   PetscPartitioner_Chaco *p;
1895   PetscErrorCode          ierr;
1896 
1897   PetscFunctionBegin;
1898   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1899   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1900   part->data = p;
1901 
1902   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1903   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1904   PetscFunctionReturn(0);
1905 }
1906 
/* Option names for the two ParMETIS partitioning schemes: k-way and recursive bisection */
static const char *ptypes[] = {"kway", "rb"};
1908 
1909 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1910 {
1911   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1912   PetscErrorCode             ierr;
1913 
1914   PetscFunctionBegin;
1915   ierr = MPI_Comm_free(&p->pcomm);CHKERRQ(ierr);
1916   ierr = PetscFree(p);CHKERRQ(ierr);
1917   PetscFunctionReturn(0);
1918 }
1919 
1920 static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1921 {
1922   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1923   PetscErrorCode             ierr;
1924 
1925   PetscFunctionBegin;
1926   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1927   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis type: %s\n", ptypes[p->ptype]);CHKERRQ(ierr);
1928   ierr = PetscViewerASCIIPrintf(viewer, "load imbalance ratio %g\n", (double) p->imbalanceRatio);CHKERRQ(ierr);
1929   ierr = PetscViewerASCIIPrintf(viewer, "debug flag %D\n", p->debugFlag);CHKERRQ(ierr);
1930   ierr = PetscViewerASCIIPrintf(viewer, "random seed %D\n", p->randomSeed);CHKERRQ(ierr);
1931   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1932   PetscFunctionReturn(0);
1933 }
1934 
1935 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1936 {
1937   PetscBool      iascii;
1938   PetscErrorCode ierr;
1939 
1940   PetscFunctionBegin;
1941   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1942   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1943   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1944   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1945   PetscFunctionReturn(0);
1946 }
1947 
/* Process the -petscpartitioner_parmetis_* options: partitioning type (kway/rb),
   load imbalance ratio, ParMETIS debugging flag, and random seed */
static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
  PetscErrorCode            ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");CHKERRQ(ierr);
  ierr = PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-petscpartitioner_parmetis_imbalance_ratio", "Load imbalance ratio limit", "", p->imbalanceRatio, &p->imbalanceRatio, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-petscpartitioner_parmetis_debug", "Debugging flag", "", p->debugFlag, &p->debugFlag, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-petscpartitioner_parmetis_seed", "Random seed", "", p->randomSeed, &p->randomSeed, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1962 
1963 #if defined(PETSC_HAVE_PARMETIS)
1964 #include <parmetis.h>
1965 #endif
1966 
/* PetscPartitionerPartition_ParMetis - Partition the distributed graph with ParMETIS,
   falling back to serial METIS when a single rank owns the entire graph.

   Target partition weights (targetSection) become tpwgts fractions; vertex weights
   (vertSection) become vwgt.  Ranks with empty local graphs are split out of the
   communicator because ParMETIS cannot handle them. */
static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_PARMETIS)
  PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *) part->data;
  MPI_Comm       comm;
  PetscInt       nvtxs       = numVertices; /* The number of vertices in full graph */
  PetscInt      *vtxdist;                   /* Distribution of vertices across processes */
  PetscInt      *xadj        = start;       /* Start of edge list for each vertex */
  PetscInt      *adjncy      = adjacency;   /* Edge lists for all vertices */
  PetscInt      *vwgt        = NULL;        /* Vertex weights */
  PetscInt      *adjwgt      = NULL;        /* Edge weights */
  PetscInt       wgtflag     = 0;           /* Indicates which weights are present */
  PetscInt       numflag     = 0;           /* Indicates initial offset (0 or 1) */
  PetscInt       ncon        = 1;           /* The number of weights per vertex */
  PetscInt       metis_ptype = pm->ptype;   /* kway or recursive bisection */
  real_t        *tpwgts;                    /* The fraction of vertex weights assigned to each partition */
  real_t        *ubvec;                     /* The balance intolerance for vertex weights */
  PetscInt       options[64];               /* Options */
  PetscInt       v, i, *assignment, *points;
  PetscMPIInt    p, size, rank;
  PetscBool      hasempty = PETSC_FALSE;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  /* Calculate vertex distribution */
  ierr = PetscMalloc4(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment);CHKERRQ(ierr);
  vtxdist[0] = 0;
  ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
  /* Prefix-sum the local counts into global offsets, and record whether any rank is empty */
  for (p = 2; p <= size; ++p) {
    hasempty = (PetscBool)(hasempty || !vtxdist[p-1] || !vtxdist[p]);
    vtxdist[p] += vtxdist[p-1];
  }
  /* null graph */
  if (vtxdist[size] == 0) {
    ierr = PetscFree4(vtxdist,tpwgts,ubvec,assignment);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  /* Calculate partition weights */
  if (targetSection) {
    PetscInt p;
    real_t   sumt = 0.0;

    for (p = 0; p < nparts; ++p) {
      PetscInt tpd;

      ierr = PetscSectionGetDof(targetSection,p,&tpd);CHKERRQ(ierr);
      sumt += tpd;
      tpwgts[p] = tpd;
    }
    if (sumt) { /* METIS/ParMETIS do not like exactly zero weight */
      /* Clamp to PETSC_SMALL, normalize to sum 1, and make the last entry absorb rounding error */
      for (p = 0, sumt = 0.0; p < nparts; ++p) {
        tpwgts[p] = PetscMax(tpwgts[p],PETSC_SMALL);
        sumt += tpwgts[p];
      }
      for (p = 0; p < nparts; ++p) tpwgts[p] /= sumt;
      for (p = 0, sumt = 0.0; p < nparts-1; ++p) sumt += tpwgts[p];
      tpwgts[nparts - 1] = 1. - sumt;
    }
  } else {
    /* No targets: equal fraction for every partition */
    for (p = 0; p < nparts; ++p) tpwgts[p] = 1.0/nparts;
  }
  ubvec[0] = pm->imbalanceRatio;

  /* Weight cells */
  if (vertSection) {
    ierr = PetscMalloc1(nvtxs,&vwgt);CHKERRQ(ierr);
    for (v = 0; v < nvtxs; ++v) {
      ierr = PetscSectionGetDof(vertSection, v, &vwgt[v]);CHKERRQ(ierr);
    }
    wgtflag |= 2; /* have weights on graph vertices */
  }

  /* Find the first rank with vertices; if it owns the whole graph, run serial METIS there.
     NOTE(review): the loop condition reads vtxdist[p+1] before testing p < size; this is
     safe only because the empty global graph returned early above, so a nonzero entry is
     always found before p reaches size -- confirm when modifying the early return. */
  for (p = 0; !vtxdist[p+1] && p < size; ++p);
  if (vtxdist[p+1] == vtxdist[size]) {
    if (rank == p) {
      ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
      options[METIS_OPTION_DBGLVL] = pm->debugFlag;
      options[METIS_OPTION_SEED]   = pm->randomSeed;
      if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
      if (metis_ptype == 1) {
        PetscStackPush("METIS_PartGraphRecursive");
        ierr = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
        PetscStackPop;
        if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
      } else {
        /*
         It would be nice to activate the two options below, but they would need some actual testing.
         - Turning on these options may exercise path of the METIS code that have bugs and may break production runs.
         - If CONTIG is set to 1, METIS will exit with error if the graph is disconnected, despite the manual saying the option is ignored in such case.
        */
        /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
        /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
        PetscStackPush("METIS_PartGraphKway");
        ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
        PetscStackPop;
        if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
      }
    }
  } else {
    MPI_Comm pcomm;

    options[0] = 1; /*use options */
    options[1] = pm->debugFlag;
    options[2] = (pm->randomSeed == -1) ? 15 : pm->randomSeed; /* default is GLOBAL_SEED=15 from `libparmetis/defs.h` */

    if (hasempty) { /* parmetis does not support empty graphs on some of the processes */
      PetscInt cnt;

      /* Split off the nonempty ranks and compact vtxdist accordingly */
      ierr = MPI_Comm_split(pm->pcomm,!!nvtxs,rank,&pcomm);CHKERRQ(ierr);
      for (p=0,cnt=0;p<size;p++) {
        if (vtxdist[p+1] != vtxdist[p]) {
          vtxdist[cnt+1] = vtxdist[p+1];
          cnt++;
        }
      }
    } else pcomm = pm->pcomm;
    if (nvtxs) {
      PetscStackPush("ParMETIS_V3_PartKway");
      ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &pcomm);
      PetscStackPop;
      if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
    }
    if (hasempty) {
      ierr = MPI_Comm_free(&pcomm);CHKERRQ(ierr);
    }
  }

  /* Convert to PetscSection+IS */
  for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
  /* Group the vertices of each partition together, preserving order within a partition */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  ierr = PetscFree4(vtxdist,tpwgts,ubvec,assignment);CHKERRQ(ierr);
  ierr = PetscFree(vwgt);CHKERRQ(ierr);
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
#endif
}
2115 
2116 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
2117 {
2118   PetscFunctionBegin;
2119   part->noGraph             = PETSC_FALSE;
2120   part->ops->view           = PetscPartitionerView_ParMetis;
2121   part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
2122   part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
2123   part->ops->partition      = PetscPartitionerPartition_ParMetis;
2124   PetscFunctionReturn(0);
2125 }
2126 
2127 /*MC
2128   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMETIS library
2129 
2130   Level: intermediate
2131 
2132   Options Database Keys:
2133 +  -petscpartitioner_parmetis_type <string> - ParMETIS partitioning type. Either "kway" or "rb" (recursive bisection)
2134 .  -petscpartitioner_parmetis_imbalance_ratio <value> - Load imbalance ratio limit
2135 .  -petscpartitioner_parmetis_debug <int> - Debugging flag passed to ParMETIS/METIS routines
2136 -  -petscpartitioner_parmetis_seed <int> - Random seed
2137 
2138   Notes: when the graph is on a single process, this partitioner actually calls METIS and not ParMETIS
2139 
2140 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
2141 M*/
2142 
2143 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
2144 {
2145   PetscPartitioner_ParMetis *p;
2146   PetscErrorCode          ierr;
2147 
2148   PetscFunctionBegin;
2149   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
2150   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
2151   part->data = p;
2152 
2153   ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)part),&p->pcomm);CHKERRQ(ierr);
2154   p->ptype          = 0;
2155   p->imbalanceRatio = 1.05;
2156   p->debugFlag      = 0;
2157   p->randomSeed     = -1; /* defaults to GLOBAL_SEED=15 from `libparmetis/defs.h` */
2158 
2159   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
2160   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
2161   PetscFunctionReturn(0);
2162 }
2163 
2164 #if defined(PETSC_HAVE_PTSCOTCH)
2165 
2166 EXTERN_C_BEGIN
2167 #include <ptscotch.h>
2168 EXTERN_C_END
2169 
2170 #define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0)
2171 
2172 static int PTScotch_Strategy(PetscInt strategy)
2173 {
2174   switch (strategy) {
2175   case  0: return SCOTCH_STRATDEFAULT;
2176   case  1: return SCOTCH_STRATQUALITY;
2177   case  2: return SCOTCH_STRATSPEED;
2178   case  3: return SCOTCH_STRATBALANCE;
2179   case  4: return SCOTCH_STRATSAFETY;
2180   case  5: return SCOTCH_STRATSCALABILITY;
2181   case  6: return SCOTCH_STRATRECURSIVE;
2182   case  7: return SCOTCH_STRATREMAP;
2183   default: return SCOTCH_STRATDEFAULT;
2184   }
2185 }
2186 
/* Partition a graph held entirely on one process using (sequential) Scotch.
   The graph is CSR (xadj/adjncy) with n vertices; vtxwgt/adjwgt are optional
   vertex/edge weights, tpart optionally gives target partition weights, and
   part[] receives each vertex's partition number. */
static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
                                             SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num tpart[], SCOTCH_Num part[])
{
  SCOTCH_Graph   grafdat;
  SCOTCH_Strat   stradat;
  SCOTCH_Num     vertnbr = n;
  SCOTCH_Num     edgenbr = xadj[n];   /* total number of adjacency entries in the CSR graph */
  SCOTCH_Num*    velotab = vtxwgt;
  SCOTCH_Num*    edlotab = adjwgt;
  SCOTCH_Num     flagval = strategy;
  double         kbalval = imbalance;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  {
    PetscBool flg = PETSC_TRUE;
    /* NOTE: "petscpartititoner" is the historical misspelling the deprecated option
       was registered under; it must stay byte-identical for the lookup to work */
    ierr = PetscOptionsDeprecatedNoObject("-petscpartititoner_ptscotch_vertex_weight",NULL,"3.13","Use -petscpartitioner_use_vertex_weights");CHKERRQ(ierr);
    ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
    if (!flg) velotab = NULL;   /* user opted out of vertex weights */
  }
  ierr = SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
  if (tpart) {
    /* Target partition weights: map onto a weighted complete-graph architecture */
    SCOTCH_Arch archdat;
    ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
    ierr = SCOTCH_archCmpltw(&archdat, nparts, tpart);CHKERRPTSCOTCH(ierr);
    ierr = SCOTCH_graphMap(&grafdat, &archdat, &stradat, part);CHKERRPTSCOTCH(ierr);
    SCOTCH_archExit(&archdat);
  } else {
    ierr = SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
  }
  SCOTCH_stratExit(&stradat);
  SCOTCH_graphExit(&grafdat);
  PetscFunctionReturn(0);
}
2224 
2225 static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
2226                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num tpart[], SCOTCH_Num part[], MPI_Comm comm)
2227 {
2228   PetscMPIInt     procglbnbr;
2229   PetscMPIInt     proclocnum;
2230   SCOTCH_Arch     archdat;
2231   SCOTCH_Dgraph   grafdat;
2232   SCOTCH_Dmapping mappdat;
2233   SCOTCH_Strat    stradat;
2234   SCOTCH_Num      vertlocnbr;
2235   SCOTCH_Num      edgelocnbr;
2236   SCOTCH_Num*     veloloctab = vtxwgt;
2237   SCOTCH_Num*     edloloctab = adjwgt;
2238   SCOTCH_Num      flagval = strategy;
2239   double          kbalval = imbalance;
2240   PetscErrorCode  ierr;
2241 
2242   PetscFunctionBegin;
2243   {
2244     PetscBool flg = PETSC_TRUE;
2245     ierr = PetscOptionsDeprecatedNoObject("-petscpartititoner_ptscotch_vertex_weight",NULL,"3.13","Use -petscpartitioner_use_vertex_weights");CHKERRQ(ierr);
2246     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
2247     if (!flg) veloloctab = NULL;
2248   }
2249   ierr = MPI_Comm_size(comm, &procglbnbr);CHKERRQ(ierr);
2250   ierr = MPI_Comm_rank(comm, &proclocnum);CHKERRQ(ierr);
2251   vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
2252   edgelocnbr = xadj[vertlocnbr];
2253 
2254   ierr = SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr);
2255   ierr = SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr);
2256   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
2257   ierr = SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRQ(ierr);
2258   ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
2259   if (tpart) { /* target partition weights */
2260     ierr = SCOTCH_archCmpltw(&archdat, nparts, tpart);CHKERRPTSCOTCH(ierr);
2261   } else {
2262     ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
2263   }
2264   ierr = SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr);
2265 
2266   ierr = SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr);
2267   SCOTCH_dgraphMapExit(&grafdat, &mappdat);
2268   SCOTCH_archExit(&archdat);
2269   SCOTCH_stratExit(&stradat);
2270   SCOTCH_dgraphExit(&grafdat);
2271   PetscFunctionReturn(0);
2272 }
2273 
2274 #endif /* PETSC_HAVE_PTSCOTCH */
2275 
2276 static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
2277 {
2278   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
2279   PetscErrorCode             ierr;
2280 
2281   PetscFunctionBegin;
2282   ierr = MPI_Comm_free(&p->pcomm);CHKERRQ(ierr);
2283   ierr = PetscFree(p);CHKERRQ(ierr);
2284   PetscFunctionReturn(0);
2285 }
2286 
2287 static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer)
2288 {
2289   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
2290   PetscErrorCode            ierr;
2291 
2292   PetscFunctionBegin;
2293   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
2294   ierr = PetscViewerASCIIPrintf(viewer,"using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);CHKERRQ(ierr);
2295   ierr = PetscViewerASCIIPrintf(viewer,"using load imbalance ratio %g\n",(double)p->imbalance);CHKERRQ(ierr);
2296   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
2297   PetscFunctionReturn(0);
2298 }
2299 
2300 static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
2301 {
2302   PetscBool      iascii;
2303   PetscErrorCode ierr;
2304 
2305   PetscFunctionBegin;
2306   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
2307   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
2308   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
2309   if (iascii) {ierr = PetscPartitionerView_PTScotch_Ascii(part, viewer);CHKERRQ(ierr);}
2310   PetscFunctionReturn(0);
2311 }
2312 
2313 static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
2314 {
2315   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
2316   const char *const         *slist = PTScotchStrategyList;
2317   PetscInt                  nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0]));
2318   PetscBool                 flag;
2319   PetscErrorCode            ierr;
2320 
2321   PetscFunctionBegin;
2322   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");CHKERRQ(ierr);
2323   ierr = PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);CHKERRQ(ierr);
2324   ierr = PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);CHKERRQ(ierr);
2325   ierr = PetscOptionsTail();CHKERRQ(ierr);
2326   PetscFunctionReturn(0);
2327 }
2328 
2329 static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
2330 {
2331 #if defined(PETSC_HAVE_PTSCOTCH)
2332   MPI_Comm       comm;
2333   PetscInt       nvtxs = numVertices;   /* The number of vertices in full graph */
2334   PetscInt       *vtxdist;              /* Distribution of vertices across processes */
2335   PetscInt       *xadj   = start;       /* Start of edge list for each vertex */
2336   PetscInt       *adjncy = adjacency;   /* Edge lists for all vertices */
2337   PetscInt       *vwgt   = NULL;        /* Vertex weights */
2338   PetscInt       *adjwgt = NULL;        /* Edge weights */
2339   PetscInt       v, i, *assignment, *points;
2340   PetscMPIInt    size, rank, p;
2341   PetscBool      hasempty = PETSC_FALSE;
2342   PetscInt       *tpwgts = NULL;
2343   PetscErrorCode ierr;
2344 
2345   PetscFunctionBegin;
2346   ierr = PetscObjectGetComm((PetscObject)part,&comm);CHKERRQ(ierr);
2347   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2348   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2349   ierr = PetscMalloc2(size+1,&vtxdist,PetscMax(nvtxs,1),&assignment);CHKERRQ(ierr);
2350   /* Calculate vertex distribution */
2351   vtxdist[0] = 0;
2352   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
2353   for (p = 2; p <= size; ++p) {
2354     hasempty = (PetscBool)(hasempty || !vtxdist[p-1] || !vtxdist[p]);
2355     vtxdist[p] += vtxdist[p-1];
2356   }
2357   /* null graph */
2358   if (vtxdist[size] == 0) {
2359     ierr = PetscFree2(vtxdist, assignment);CHKERRQ(ierr);
2360     ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
2361     PetscFunctionReturn(0);
2362   }
2363 
2364   /* Calculate vertex weights */
2365   if (vertSection) {
2366     ierr = PetscMalloc1(nvtxs,&vwgt);CHKERRQ(ierr);
2367     for (v = 0; v < nvtxs; ++v) {
2368       ierr = PetscSectionGetDof(vertSection, v, &vwgt[v]);CHKERRQ(ierr);
2369     }
2370   }
2371 
2372   /* Calculate partition weights */
2373   if (targetSection) {
2374     PetscInt sumw;
2375 
2376     ierr = PetscCalloc1(nparts,&tpwgts);CHKERRQ(ierr);
2377     for (p = 0, sumw = 0; p < nparts; ++p) {
2378       ierr = PetscSectionGetDof(targetSection,p,&tpwgts[p]);CHKERRQ(ierr);
2379       sumw += tpwgts[p];
2380     }
2381     if (!sumw) {
2382       ierr = PetscFree(tpwgts);CHKERRQ(ierr);
2383     }
2384   }
2385 
2386   {
2387     PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
2388     int                       strat = PTScotch_Strategy(pts->strategy);
2389     double                    imbal = (double)pts->imbalance;
2390 
2391     for (p = 0; !vtxdist[p+1] && p < size; ++p);
2392     if (vtxdist[p+1] == vtxdist[size]) {
2393       if (rank == p) {
2394         ierr = PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, assignment);CHKERRQ(ierr);
2395       }
2396     } else {
2397       PetscInt cnt;
2398       MPI_Comm pcomm;
2399 
2400       if (hasempty) {
2401         ierr = MPI_Comm_split(pts->pcomm,!!nvtxs,rank,&pcomm);CHKERRQ(ierr);
2402         for (p=0,cnt=0;p<size;p++) {
2403           if (vtxdist[p+1] != vtxdist[p]) {
2404             vtxdist[cnt+1] = vtxdist[p+1];
2405             cnt++;
2406           }
2407         }
2408       } else pcomm = pts->pcomm;
2409       if (nvtxs) {
2410         ierr = PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, assignment, pcomm);CHKERRQ(ierr);
2411       }
2412       if (hasempty) {
2413         ierr = MPI_Comm_free(&pcomm);CHKERRQ(ierr);
2414       }
2415     }
2416   }
2417   ierr = PetscFree(vwgt);CHKERRQ(ierr);
2418   ierr = PetscFree(tpwgts);CHKERRQ(ierr);
2419 
2420   /* Convert to PetscSection+IS */
2421   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
2422   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
2423   for (p = 0, i = 0; p < nparts; ++p) {
2424     for (v = 0; v < nvtxs; ++v) {
2425       if (assignment[v] == p) points[i++] = v;
2426     }
2427   }
2428   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
2429   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
2430 
2431   ierr = PetscFree2(vtxdist,assignment);CHKERRQ(ierr);
2432   PetscFunctionReturn(0);
2433 #else
2434   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
2435 #endif
2436 }
2437 
2438 static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
2439 {
2440   PetscFunctionBegin;
2441   part->noGraph             = PETSC_FALSE;
2442   part->ops->view           = PetscPartitionerView_PTScotch;
2443   part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
2444   part->ops->partition      = PetscPartitionerPartition_PTScotch;
2445   part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
2446   PetscFunctionReturn(0);
2447 }
2448 
2449 /*MC
2450   PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library
2451 
2452   Level: intermediate
2453 
2454   Options Database Keys:
2455 +  -petscpartitioner_ptscotch_strategy <string> - PT-Scotch strategy. Choose one of default quality speed balance safety scalability recursive remap
2456 -  -petscpartitioner_ptscotch_imbalance <val> - Load imbalance ratio
2457 
2458   Notes: when the graph is on a single process, this partitioner actually uses Scotch and not PT-Scotch
2459 
2460 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
2461 M*/
2462 
2463 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
2464 {
2465   PetscPartitioner_PTScotch *p;
2466   PetscErrorCode          ierr;
2467 
2468   PetscFunctionBegin;
2469   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
2470   ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
2471   part->data = p;
2472 
2473   ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)part),&p->pcomm);CHKERRQ(ierr);
2474   p->strategy  = 0;
2475   p->imbalance = 0.01;
2476 
2477   ierr = PetscPartitionerInitialize_PTScotch(part);CHKERRQ(ierr);
2478   ierr = PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);CHKERRQ(ierr);
2479   PetscFunctionReturn(0);
2480 }
2481 
2482 /*@
2483   DMPlexGetPartitioner - Get the mesh partitioner
2484 
2485   Not collective
2486 
2487   Input Parameter:
2488 . dm - The DM
2489 
2490   Output Parameter:
2491 . part - The PetscPartitioner
2492 
2493   Level: developer
2494 
2495   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
2496 
2497 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerDMPlexPartition(), PetscPartitionerCreate()
2498 @*/
2499 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
2500 {
2501   DM_Plex       *mesh = (DM_Plex *) dm->data;
2502 
2503   PetscFunctionBegin;
2504   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2505   PetscValidPointer(part, 2);
2506   *part = mesh->partitioner;
2507   PetscFunctionReturn(0);
2508 }
2509 
2510 /*@
2511   DMPlexSetPartitioner - Set the mesh partitioner
2512 
2513   logically collective on DM
2514 
2515   Input Parameters:
2516 + dm - The DM
2517 - part - The partitioner
2518 
2519   Level: developer
2520 
2521   Note: Any existing PetscPartitioner will be destroyed.
2522 
2523 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
2524 @*/
2525 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
2526 {
2527   DM_Plex       *mesh = (DM_Plex *) dm->data;
2528   PetscErrorCode ierr;
2529 
2530   PetscFunctionBegin;
2531   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2532   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
2533   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
2534   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
2535   mesh->partitioner = part;
2536   PetscFunctionReturn(0);
2537 }
2538 
2539 static PetscErrorCode DMPlexAddClosure_Private(DM dm, PetscHSetI ht, PetscInt point)
2540 {
2541   const PetscInt *cone;
2542   PetscInt       coneSize, c;
2543   PetscBool      missing;
2544   PetscErrorCode ierr;
2545 
2546   PetscFunctionBeginHot;
2547   ierr = PetscHSetIQueryAdd(ht, point, &missing);CHKERRQ(ierr);
2548   if (missing) {
2549     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
2550     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
2551     for (c = 0; c < coneSize; c++) {
2552       ierr = DMPlexAddClosure_Private(dm, ht, cone[c]);CHKERRQ(ierr);
2553     }
2554   }
2555   PetscFunctionReturn(0);
2556 }
2557 
/* Add tree-related points for 'point' to the hash set: when 'up', the closure of its
   tree parent (recursing upward); when 'down', its tree children (recursing downward).
   Marked PETSC_UNUSED -- kept for the disabled reference path in
   DMPlexClosurePoints_Private(). */
PETSC_UNUSED static PetscErrorCode DMPlexAddClosure_Tree(DM dm, PetscHSetI ht, PetscInt point, PetscBool up, PetscBool down)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (up) {
    PetscInt parent;

    ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
    /* A point without a tree parent is reported as its own parent */
    if (parent != point) {
      PetscInt closureSize, *closure = NULL, i;

      ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
      for (i = 0; i < closureSize; i++) {
        PetscInt cpoint = closure[2*i];   /* closure entries come in pairs; even entries are the points */

        ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
        /* Keep climbing the tree from each closure point (up only, to avoid re-descending) */
        ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
      }
      ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
    }
  }
  if (down) {
    PetscInt numChildren;
    const PetscInt *children;

    ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
    if (numChildren) {
      PetscInt i;

      for (i = 0; i < numChildren; i++) {
        PetscInt cpoint = children[i];

        ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
        /* Keep descending the tree from each child (down only) */
        ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}
2598 
2599 static PetscErrorCode DMPlexAddClosureTree_Up_Private(DM dm, PetscHSetI ht, PetscInt point)
2600 {
2601   PetscInt       parent;
2602   PetscErrorCode ierr;
2603 
2604   PetscFunctionBeginHot;
2605   ierr = DMPlexGetTreeParent(dm, point, &parent,NULL);CHKERRQ(ierr);
2606   if (point != parent) {
2607     const PetscInt *cone;
2608     PetscInt       coneSize, c;
2609 
2610     ierr = DMPlexAddClosureTree_Up_Private(dm, ht, parent);CHKERRQ(ierr);
2611     ierr = DMPlexAddClosure_Private(dm, ht, parent);CHKERRQ(ierr);
2612     ierr = DMPlexGetCone(dm, parent, &cone);CHKERRQ(ierr);
2613     ierr = DMPlexGetConeSize(dm, parent, &coneSize);CHKERRQ(ierr);
2614     for (c = 0; c < coneSize; c++) {
2615       const PetscInt cp = cone[c];
2616 
2617       ierr = DMPlexAddClosureTree_Up_Private(dm, ht, cp);CHKERRQ(ierr);
2618     }
2619   }
2620   PetscFunctionReturn(0);
2621 }
2622 
2623 static PetscErrorCode DMPlexAddClosureTree_Down_Private(DM dm, PetscHSetI ht, PetscInt point)
2624 {
2625   PetscInt       i, numChildren;
2626   const PetscInt *children;
2627   PetscErrorCode ierr;
2628 
2629   PetscFunctionBeginHot;
2630   ierr = DMPlexGetTreeChildren(dm, point, &numChildren, &children);CHKERRQ(ierr);
2631   for (i = 0; i < numChildren; i++) {
2632     ierr = PetscHSetIAdd(ht, children[i]);CHKERRQ(ierr);
2633   }
2634   PetscFunctionReturn(0);
2635 }
2636 
2637 static PetscErrorCode DMPlexAddClosureTree_Private(DM dm, PetscHSetI ht, PetscInt point)
2638 {
2639   const PetscInt *cone;
2640   PetscInt       coneSize, c;
2641   PetscErrorCode ierr;
2642 
2643   PetscFunctionBeginHot;
2644   ierr = PetscHSetIAdd(ht, point);CHKERRQ(ierr);
2645   ierr = DMPlexAddClosureTree_Up_Private(dm, ht, point);CHKERRQ(ierr);
2646   ierr = DMPlexAddClosureTree_Down_Private(dm, ht, point);CHKERRQ(ierr);
2647   ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
2648   ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
2649   for (c = 0; c < coneSize; c++) {
2650     ierr = DMPlexAddClosureTree_Private(dm, ht, cone[c]);CHKERRQ(ierr);
2651   }
2652   PetscFunctionReturn(0);
2653 }
2654 
/* Compute the union of the closures of the given points and return it as a sorted IS.
   When the mesh has parent/child (tree) relations, tree ancestors and children are
   included in the closure as well. The IS owns the returned array. */
PetscErrorCode DMPlexClosurePoints_Private(DM dm, PetscInt numPoints, const PetscInt points[], IS *closureIS)
{
  DM_Plex         *mesh = (DM_Plex *)dm->data;
  const PetscBool hasTree = (mesh->parentSection || mesh->childSection) ? PETSC_TRUE : PETSC_FALSE;
  PetscInt        nelems, *elems, off = 0, p;
  PetscHSetI      ht;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
  /* Heuristic presize: roughly 16 closure points per input point */
  ierr = PetscHSetIResize(ht, numPoints*16);CHKERRQ(ierr);
  if (!hasTree) {
    for (p = 0; p < numPoints; ++p) {
      ierr = DMPlexAddClosure_Private(dm, ht, points[p]);CHKERRQ(ierr);
    }
  } else {
#if 1
    for (p = 0; p < numPoints; ++p) {
      ierr = DMPlexAddClosureTree_Private(dm, ht, points[p]);CHKERRQ(ierr);
    }
#else
    /* Alternative implementation via DMPlexGetTransitiveClosure, kept for reference */
    PetscInt  *closure = NULL, closureSize, c;
    for (p = 0; p < numPoints; ++p) {
      ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      for (c = 0; c < closureSize*2; c += 2) {
        ierr = PetscHSetIAdd(ht, closure[c]);CHKERRQ(ierr);
        if (hasTree) {ierr = DMPlexAddClosure_Tree(dm, ht, closure[c], PETSC_TRUE, PETSC_TRUE);CHKERRQ(ierr);}
      }
    }
    if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure);CHKERRQ(ierr);}
#endif
  }
  /* Flatten the hash set into a sorted array; ownership passes to the IS */
  ierr = PetscHSetIGetSize(ht, &nelems);CHKERRQ(ierr);
  ierr = PetscMalloc1(nelems, &elems);CHKERRQ(ierr);
  ierr = PetscHSetIGetElems(ht, &off, elems);CHKERRQ(ierr);
  ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
  ierr = PetscSortInt(nelems, elems);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF, nelems, elems, PETSC_OWN_POINTER, closureIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2695 
2696 /*@
2697   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
2698 
2699   Input Parameters:
2700 + dm     - The DM
2701 - label  - DMLabel assigning ranks to remote roots
2702 
2703   Level: developer
2704 
2705 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2706 @*/
2707 PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
2708 {
2709   IS              rankIS,   pointIS, closureIS;
2710   const PetscInt *ranks,   *points;
2711   PetscInt        numRanks, numPoints, r;
2712   PetscErrorCode  ierr;
2713 
2714   PetscFunctionBegin;
2715   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2716   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2717   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2718   for (r = 0; r < numRanks; ++r) {
2719     const PetscInt rank = ranks[r];
2720     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2721     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2722     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2723     ierr = DMPlexClosurePoints_Private(dm, numPoints, points, &closureIS);CHKERRQ(ierr);
2724     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2725     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2726     ierr = DMLabelSetStratumIS(label, rank, closureIS);CHKERRQ(ierr);
2727     ierr = ISDestroy(&closureIS);CHKERRQ(ierr);
2728   }
2729   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2730   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2731   PetscFunctionReturn(0);
2732 }
2733 
2734 /*@
2735   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
2736 
2737   Input Parameters:
2738 + dm     - The DM
2739 - label  - DMLabel assigning ranks to remote roots
2740 
2741   Level: developer
2742 
2743 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2744 @*/
2745 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
2746 {
2747   IS              rankIS,   pointIS;
2748   const PetscInt *ranks,   *points;
2749   PetscInt        numRanks, numPoints, r, p, a, adjSize;
2750   PetscInt       *adj = NULL;
2751   PetscErrorCode  ierr;
2752 
2753   PetscFunctionBegin;
2754   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2755   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2756   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2757   for (r = 0; r < numRanks; ++r) {
2758     const PetscInt rank = ranks[r];
2759 
2760     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2761     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2762     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2763     for (p = 0; p < numPoints; ++p) {
2764       adjSize = PETSC_DETERMINE;
2765       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
2766       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
2767     }
2768     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2769     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2770   }
2771   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2772   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2773   ierr = PetscFree(adj);CHKERRQ(ierr);
2774   PetscFunctionReturn(0);
2775 }
2776 
2777 /*@
2778   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
2779 
2780   Input Parameters:
2781 + dm     - The DM
2782 - label  - DMLabel assigning ranks to remote roots
2783 
2784   Level: developer
2785 
2786   Note: This is required when generating multi-level overlaps to capture
2787   overlap points from non-neighbouring partitions.
2788 
2789 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2790 @*/
PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
{
  MPI_Comm        comm;
  PetscMPIInt     rank;
  PetscSF         sfPoint;
  DMLabel         lblRoots, lblLeaves;
  IS              rankIS, pointIS;
  const PetscInt *ranks;
  PetscInt        numRanks, r;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* Pull point contributions from remote leaves into local roots */
  ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt remoteRank = ranks[r];
    /* Skip entries that already point to this rank */
    if (remoteRank == rank) continue;
    ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
    ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
  /* Push point contributions from roots into remote leaves (the symmetric phase) */
  ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt remoteRank = ranks[r];
    if (remoteRank == rank) continue;
    ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
    ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2838 
2839 /*@
2840   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
2841 
2842   Input Parameters:
2843 + dm        - The DM
2844 . rootLabel - DMLabel assigning ranks to local roots
2845 - processSF - A star forest mapping into the local index on each remote rank
2846 
2847   Output Parameter:
2848 . leafLabel - DMLabel assigning ranks to remote roots
2849 
2850   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
2851   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
2852 
2853   Level: developer
2854 
2855 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2856 @*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size, r;
  PetscInt           p, n, numNeighbors, numPoints, dof, off, rootSize, l, nleaves, leafSize;
  PetscSF            sfPoint;
  PetscSection       rootSection;
  PetscSFNode       *rootPoints, *leafPoints;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscBool          mpiOverflow = PETSC_FALSE;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_PartLabelInvert,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners */
  /* rootSection is indexed by destination rank: dof(r) = number of local points labeled
     with rank r, so its offsets partition rootPoints into per-destination segments */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &rootSize);CHKERRQ(ierr);
  ierr = PetscMalloc1(rootSize, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* If the labeled point is a leaf of the point SF, record its actual remote owner;
         otherwise the point is owned here and we record (point, rank) directly */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }

  /* Try to communicate overlap using All-to-All */
  if (!processSF) {
    PetscInt64  counter = 0;
    PetscBool   locOverflow = PETSC_FALSE;
    PetscMPIInt *scounts, *sdispls, *rcounts, *rdispls;

    /* Build Alltoallv count/displacement arrays from the per-rank section layout */
    ierr = PetscCalloc4(size, &scounts, size, &sdispls, size, &rcounts, size, &rdispls);CHKERRQ(ierr);
    for (n = 0; n < numNeighbors; ++n) {
      ierr = PetscSectionGetDof(rootSection, neighbors[n], &dof);CHKERRQ(ierr);
      ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
#if defined(PETSC_USE_64BIT_INDICES)
      /* MPI counts/displacements are C int; detect local overflow and fall back to the SF path */
      if (dof > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
      if (off > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
#endif
      scounts[neighbors[n]] = (PetscMPIInt) dof;
      sdispls[neighbors[n]] = (PetscMPIInt) off;
    }
    ierr = MPI_Alltoall(scounts, 1, MPI_INT, rcounts, 1, MPI_INT, comm);CHKERRQ(ierr);
    /* Accumulate receive displacements in 64-bit to detect overflow of the total receive size */
    for (r = 0; r < size; ++r) { rdispls[r] = (int)counter; counter += rcounts[r]; }
    if (counter > PETSC_MPI_INT_MAX) locOverflow = PETSC_TRUE;
    /* All ranks must agree on the communication path, so reduce the overflow flag */
    ierr = MPI_Allreduce(&locOverflow, &mpiOverflow, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
    if (!mpiOverflow) {
      ierr = PetscInfo(dm,"Using Alltoallv for mesh distribution\n");CHKERRQ(ierr);
      leafSize = (PetscInt) counter;
      ierr = PetscMalloc1(leafSize, &leafPoints);CHKERRQ(ierr);
      ierr = MPI_Alltoallv(rootPoints, scounts, sdispls, MPIU_2INT, leafPoints, rcounts, rdispls, MPIU_2INT, comm);CHKERRQ(ierr);
    }
    ierr = PetscFree4(scounts, sdispls, rcounts, rdispls);CHKERRQ(ierr);
  }

  /* Communicate overlap using process star forest */
  /* Taken either when the caller supplied processSF, or as the fallback when the
     Alltoallv sizes overflowed MPI's int counts */
  if (processSF || mpiOverflow) {
    PetscSF      procSF;
    PetscSection leafSection;

    if (processSF) {
      ierr = PetscInfo(dm,"Using processSF for mesh distribution\n");CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)processSF);CHKERRQ(ierr);
      procSF = processSF;
    } else {
      ierr = PetscInfo(dm,"Using processSF for mesh distribution (MPI overflow)\n");CHKERRQ(ierr);
      ierr = PetscSFCreate(comm,&procSF);CHKERRQ(ierr);
      ierr = PetscSFSetGraphWithPattern(procSF,NULL,PETSCSF_PATTERN_ALLTOALL);CHKERRQ(ierr);
    }

    ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dm), &leafSection);CHKERRQ(ierr);
    ierr = DMPlexDistributeData(dm, procSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
    ierr = PetscSectionGetStorageSize(leafSection, &leafSize);CHKERRQ(ierr);
    ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&procSF);CHKERRQ(ierr);
  }

  /* Record each received (point, owner-rank) pair in the output label */
  for (p = 0; p < leafSize; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }

  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_PartLabelInvert,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2974 
2975 /*@
2976   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
2977 
2978   Input Parameters:
2979 + dm    - The DM
2980 - label - DMLabel assigning ranks to remote roots
2981 
2982   Output Parameter:
2983 . sf    - The star forest communication context encapsulating the defined mapping
2984 
2985   Note: The incoming label is a receiver mapping of remote points to their parent rank.
2986 
2987   Level: developer
2988 
2989 .seealso: DMPlexDistribute(), DMPlexCreateOverlap()
2990 @*/
2991 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
2992 {
2993   PetscMPIInt     rank;
2994   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0, nNeighbors;
2995   PetscSFNode    *remotePoints;
2996   IS              remoteRootIS, neighborsIS;
2997   const PetscInt *remoteRoots, *neighbors;
2998   PetscErrorCode ierr;
2999 
3000   PetscFunctionBegin;
3001   ierr = PetscLogEventBegin(DMPLEX_PartLabelCreateSF,dm,0,0,0);CHKERRQ(ierr);
3002   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
3003 
3004   ierr = DMLabelGetValueIS(label, &neighborsIS);CHKERRQ(ierr);
3005 #if 0
3006   {
3007     IS is;
3008     ierr = ISDuplicate(neighborsIS, &is);CHKERRQ(ierr);
3009     ierr = ISSort(is);CHKERRQ(ierr);
3010     ierr = ISDestroy(&neighborsIS);CHKERRQ(ierr);
3011     neighborsIS = is;
3012   }
3013 #endif
3014   ierr = ISGetLocalSize(neighborsIS, &nNeighbors);CHKERRQ(ierr);
3015   ierr = ISGetIndices(neighborsIS, &neighbors);CHKERRQ(ierr);
3016   for (numRemote = 0, n = 0; n < nNeighbors; ++n) {
3017     ierr = DMLabelGetStratumSize(label, neighbors[n], &numPoints);CHKERRQ(ierr);
3018     numRemote += numPoints;
3019   }
3020   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
3021   /* Put owned points first */
3022   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
3023   if (numPoints > 0) {
3024     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
3025     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
3026     for (p = 0; p < numPoints; p++) {
3027       remotePoints[idx].index = remoteRoots[p];
3028       remotePoints[idx].rank = rank;
3029       idx++;
3030     }
3031     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
3032     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
3033   }
3034   /* Now add remote points */
3035   for (n = 0; n < nNeighbors; ++n) {
3036     const PetscInt nn = neighbors[n];
3037 
3038     ierr = DMLabelGetStratumSize(label, nn, &numPoints);CHKERRQ(ierr);
3039     if (nn == rank || numPoints <= 0) continue;
3040     ierr = DMLabelGetStratumIS(label, nn, &remoteRootIS);CHKERRQ(ierr);
3041     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
3042     for (p = 0; p < numPoints; p++) {
3043       remotePoints[idx].index = remoteRoots[p];
3044       remotePoints[idx].rank = nn;
3045       idx++;
3046     }
3047     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
3048     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
3049   }
3050   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
3051   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
3052   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
3053   ierr = PetscSFSetUp(*sf);CHKERRQ(ierr);
3054   ierr = ISDestroy(&neighborsIS);CHKERRQ(ierr);
3055   ierr = PetscLogEventEnd(DMPLEX_PartLabelCreateSF,dm,0,0,0);CHKERRQ(ierr);
3056   PetscFunctionReturn(0);
3057 }
3058 
3059 /* The two functions below are used by DMPlexRebalanceSharedPoints which errors
3060  * when PETSc is built without ParMETIS. To avoid -Wunused-function, we take
3061  * them out in that case. */
3062 #if defined(PETSC_HAVE_PARMETIS)
3063 /*@C
3064 
3065   DMPlexRewriteSF - Rewrites the ownership of the SF of a DM (in place).
3066 
3067   Input parameters:
3068 + dm                - The DMPlex object.
3069 . n                 - The number of points.
3070 . pointsToRewrite   - The points in the SF whose ownership will change.
3071 . targetOwners      - New owner for each element in pointsToRewrite.
3072 - degrees           - Degrees of the points in the SF as obtained by PetscSFComputeDegreeBegin/PetscSFComputeDegreeEnd.
3073 
3074   Level: developer
3075 
3076 @*/
3077 static PetscErrorCode DMPlexRewriteSF(DM dm, PetscInt n, PetscInt *pointsToRewrite, PetscInt *targetOwners, const PetscInt *degrees)
3078 {
3079   PetscInt      ierr, pStart, pEnd, i, j, counter, leafCounter, sumDegrees, nroots, nleafs;
3080   PetscInt     *cumSumDegrees, *newOwners, *newNumbers, *rankOnLeafs, *locationsOfLeafs, *remoteLocalPointOfLeafs, *points, *leafsNew;
3081   PetscSFNode  *leafLocationsNew;
3082   const         PetscSFNode *iremote;
3083   const         PetscInt *ilocal;
3084   PetscBool    *isLeaf;
3085   PetscSF       sf;
3086   MPI_Comm      comm;
3087   PetscMPIInt   rank, size;
3088 
3089   PetscFunctionBegin;
3090   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
3091   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
3092   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
3093   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
3094 
3095   ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
3096   ierr = PetscSFGetGraph(sf, &nroots, &nleafs, &ilocal, &iremote); CHKERRQ(ierr);
3097   ierr = PetscMalloc1(pEnd-pStart, &isLeaf);CHKERRQ(ierr);
3098   for (i=0; i<pEnd-pStart; i++) {
3099     isLeaf[i] = PETSC_FALSE;
3100   }
3101   for (i=0; i<nleafs; i++) {
3102     isLeaf[ilocal[i]-pStart] = PETSC_TRUE;
3103   }
3104 
3105   ierr = PetscMalloc1(pEnd-pStart+1, &cumSumDegrees);CHKERRQ(ierr);
3106   cumSumDegrees[0] = 0;
3107   for (i=1; i<=pEnd-pStart; i++) {
3108     cumSumDegrees[i] = cumSumDegrees[i-1] + degrees[i-1];
3109   }
3110   sumDegrees = cumSumDegrees[pEnd-pStart];
3111   /* get the location of my leafs (we have sumDegrees many leafs pointing at our roots) */
3112 
3113   ierr = PetscMalloc1(sumDegrees, &locationsOfLeafs);CHKERRQ(ierr);
3114   ierr = PetscMalloc1(pEnd-pStart, &rankOnLeafs);CHKERRQ(ierr);
3115   for (i=0; i<pEnd-pStart; i++) {
3116     rankOnLeafs[i] = rank;
3117   }
3118   ierr = PetscSFGatherBegin(sf, MPIU_INT, rankOnLeafs, locationsOfLeafs);CHKERRQ(ierr);
3119   ierr = PetscSFGatherEnd(sf, MPIU_INT, rankOnLeafs, locationsOfLeafs);CHKERRQ(ierr);
3120   ierr = PetscFree(rankOnLeafs);CHKERRQ(ierr);
3121 
3122   /* get the remote local points of my leaves */
3123   ierr = PetscMalloc1(sumDegrees, &remoteLocalPointOfLeafs);CHKERRQ(ierr);
3124   ierr = PetscMalloc1(pEnd-pStart, &points);CHKERRQ(ierr);
3125   for (i=0; i<pEnd-pStart; i++) {
3126     points[i] = pStart+i;
3127   }
3128   ierr = PetscSFGatherBegin(sf, MPIU_INT, points, remoteLocalPointOfLeafs);CHKERRQ(ierr);
3129   ierr = PetscSFGatherEnd(sf, MPIU_INT, points, remoteLocalPointOfLeafs);CHKERRQ(ierr);
3130   ierr = PetscFree(points);CHKERRQ(ierr);
3131   /* Figure out the new owners of the vertices that are up for grabs and their numbers on the new owners */
3132   ierr = PetscMalloc1(pEnd-pStart, &newOwners);CHKERRQ(ierr);
3133   ierr = PetscMalloc1(pEnd-pStart, &newNumbers);CHKERRQ(ierr);
3134   for (i=0; i<pEnd-pStart; i++) {
3135     newOwners[i] = -1;
3136     newNumbers[i] = -1;
3137   }
3138   {
3139     PetscInt oldNumber, newNumber, oldOwner, newOwner;
3140     for (i=0; i<n; i++) {
3141       oldNumber = pointsToRewrite[i];
3142       newNumber = -1;
3143       oldOwner = rank;
3144       newOwner = targetOwners[i];
3145       if (oldOwner == newOwner) {
3146         newNumber = oldNumber;
3147       } else {
3148         for (j=0; j<degrees[oldNumber]; j++) {
3149           if (locationsOfLeafs[cumSumDegrees[oldNumber]+j] == newOwner) {
3150             newNumber = remoteLocalPointOfLeafs[cumSumDegrees[oldNumber]+j];
3151             break;
3152           }
3153         }
3154       }
3155       if (newNumber == -1) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Couldn't find the new owner of vertex.");
3156 
3157       newOwners[oldNumber] = newOwner;
3158       newNumbers[oldNumber] = newNumber;
3159     }
3160   }
3161   ierr = PetscFree(cumSumDegrees);CHKERRQ(ierr);
3162   ierr = PetscFree(locationsOfLeafs);CHKERRQ(ierr);
3163   ierr = PetscFree(remoteLocalPointOfLeafs);CHKERRQ(ierr);
3164 
3165   ierr = PetscSFBcastBegin(sf, MPIU_INT, newOwners, newOwners);CHKERRQ(ierr);
3166   ierr = PetscSFBcastEnd(sf, MPIU_INT, newOwners, newOwners);CHKERRQ(ierr);
3167   ierr = PetscSFBcastBegin(sf, MPIU_INT, newNumbers, newNumbers);CHKERRQ(ierr);
3168   ierr = PetscSFBcastEnd(sf, MPIU_INT, newNumbers, newNumbers);CHKERRQ(ierr);
3169 
3170   /* Now count how many leafs we have on each processor. */
3171   leafCounter=0;
3172   for (i=0; i<pEnd-pStart; i++) {
3173     if (newOwners[i] >= 0) {
3174       if (newOwners[i] != rank) {
3175         leafCounter++;
3176       }
3177     } else {
3178       if (isLeaf[i]) {
3179         leafCounter++;
3180       }
3181     }
3182   }
3183 
3184   /* Now set up the new sf by creating the leaf arrays */
3185   ierr = PetscMalloc1(leafCounter, &leafsNew);CHKERRQ(ierr);
3186   ierr = PetscMalloc1(leafCounter, &leafLocationsNew);CHKERRQ(ierr);
3187 
3188   leafCounter = 0;
3189   counter = 0;
3190   for (i=0; i<pEnd-pStart; i++) {
3191     if (newOwners[i] >= 0) {
3192       if (newOwners[i] != rank) {
3193         leafsNew[leafCounter] = i;
3194         leafLocationsNew[leafCounter].rank = newOwners[i];
3195         leafLocationsNew[leafCounter].index = newNumbers[i];
3196         leafCounter++;
3197       }
3198     } else {
3199       if (isLeaf[i]) {
3200         leafsNew[leafCounter] = i;
3201         leafLocationsNew[leafCounter].rank = iremote[counter].rank;
3202         leafLocationsNew[leafCounter].index = iremote[counter].index;
3203         leafCounter++;
3204       }
3205     }
3206     if (isLeaf[i]) {
3207       counter++;
3208     }
3209   }
3210 
3211   ierr = PetscSFSetGraph(sf, nroots, leafCounter, leafsNew, PETSC_OWN_POINTER, leafLocationsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
3212   ierr = PetscFree(newOwners);CHKERRQ(ierr);
3213   ierr = PetscFree(newNumbers);CHKERRQ(ierr);
3214   ierr = PetscFree(isLeaf);CHKERRQ(ierr);
3215   PetscFunctionReturn(0);
3216 }
3217 
3218 static PetscErrorCode DMPlexViewDistribution(MPI_Comm comm, PetscInt n, PetscInt skip, PetscInt *vtxwgt, PetscInt *part, PetscViewer viewer)
3219 {
3220   PetscInt *distribution, min, max, sum, i, ierr;
3221   PetscMPIInt rank, size;
3222   PetscFunctionBegin;
3223   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
3224   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
3225   ierr = PetscCalloc1(size, &distribution);CHKERRQ(ierr);
3226   for (i=0; i<n; i++) {
3227     if (part) distribution[part[i]] += vtxwgt[skip*i];
3228     else distribution[rank] += vtxwgt[skip*i];
3229   }
3230   ierr = MPI_Allreduce(MPI_IN_PLACE, distribution, size, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
3231   min = distribution[0];
3232   max = distribution[0];
3233   sum = distribution[0];
3234   for (i=1; i<size; i++) {
3235     if (distribution[i]<min) min=distribution[i];
3236     if (distribution[i]>max) max=distribution[i];
3237     sum += distribution[i];
3238   }
3239   ierr = PetscViewerASCIIPrintf(viewer, "Min: %D, Avg: %D, Max: %D, Balance: %f\n", min, sum/size, max, (max*1.*size)/sum);CHKERRQ(ierr);
3240   ierr = PetscFree(distribution);CHKERRQ(ierr);
3241   PetscFunctionReturn(0);
3242 }
3243 
3244 #endif
3245 
3246 /*@
3247   DMPlexRebalanceSharedPoints - Redistribute points in the plex that are shared in order to achieve better balancing. This routine updates the PointSF of the DM in place.
3248 
3249   Input parameters:
3250 + dm               - The DMPlex object.
3251 . entityDepth      - depth of the entity to balance (0 -> balance vertices).
3252 . useInitialGuess  - whether to use the current distribution as initial guess (only used by ParMETIS).
3253 - parallel         - whether to use ParMETIS and do the partition in parallel or whether to gather the graph onto a single process and use METIS.
3254 
3255   Output parameters:
3256 . success          - whether the graph partitioning was successful or not. If not, try useInitialGuess=True and parallel=True.
3257 
3258   Level: intermediate
3259 
3260 @*/
3261 
3262 PetscErrorCode DMPlexRebalanceSharedPoints(DM dm, PetscInt entityDepth, PetscBool useInitialGuess, PetscBool parallel, PetscBool *success)
3263 {
3264 #if defined(PETSC_HAVE_PARMETIS)
3265   PetscSF     sf;
3266   PetscInt    ierr, i, j, idx, jdx;
3267   PetscInt    eBegin, eEnd, nroots, nleafs, pStart, pEnd;
3268   const       PetscInt *degrees, *ilocal;
3269   const       PetscSFNode *iremote;
3270   PetscBool   *toBalance, *isLeaf, *isExclusivelyOwned, *isNonExclusivelyOwned;
3271   PetscInt    numExclusivelyOwned, numNonExclusivelyOwned;
3272   PetscMPIInt rank, size;
3273   PetscInt    *globalNumbersOfLocalOwnedVertices, *leafGlobalNumbers;
3274   const       PetscInt *cumSumVertices;
3275   PetscInt    offset, counter;
3276   PetscInt    lenadjncy;
3277   PetscInt    *xadj, *adjncy, *vtxwgt;
3278   PetscInt    lenxadj;
3279   PetscInt    *adjwgt = NULL;
3280   PetscInt    *part, *options;
3281   PetscInt    nparts, wgtflag, numflag, ncon, edgecut;
3282   real_t      *ubvec;
3283   PetscInt    *firstVertices, *renumbering;
3284   PetscInt    failed, failedGlobal;
3285   MPI_Comm    comm;
3286   Mat         A, Apre;
3287   const char *prefix = NULL;
3288   PetscViewer       viewer;
3289   PetscViewerFormat format;
3290   PetscLayout layout;
3291 
3292   PetscFunctionBegin;
3293   if (success) *success = PETSC_FALSE;
3294   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
3295   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
3296   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
3297   if (size==1) PetscFunctionReturn(0);
3298 
3299   ierr = PetscLogEventBegin(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
3300 
3301   ierr = PetscOptionsGetViewer(comm,((PetscObject)dm)->options, prefix,"-dm_rebalance_partition_view",&viewer,&format,NULL);CHKERRQ(ierr);
3302   if (viewer) {
3303     ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr);
3304   }
3305 
3306   /* Figure out all points in the plex that we are interested in balancing. */
3307   ierr = DMPlexGetDepthStratum(dm, entityDepth, &eBegin, &eEnd);CHKERRQ(ierr);
3308   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
3309   ierr = PetscMalloc1(pEnd-pStart, &toBalance);CHKERRQ(ierr);
3310 
3311   for (i=0; i<pEnd-pStart; i++) {
3312     toBalance[i] = (PetscBool)(i-pStart>=eBegin && i-pStart<eEnd);
3313   }
3314 
3315   /* There are three types of points:
3316    * exclusivelyOwned: points that are owned by this process and only seen by this process
3317    * nonExclusivelyOwned: points that are owned by this process but seen by at least another process
3318    * leaf: a point that is seen by this process but owned by a different process
3319    */
3320   ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
3321   ierr = PetscSFGetGraph(sf, &nroots, &nleafs, &ilocal, &iremote); CHKERRQ(ierr);
3322   ierr = PetscMalloc1(pEnd-pStart, &isLeaf);CHKERRQ(ierr);
3323   ierr = PetscMalloc1(pEnd-pStart, &isNonExclusivelyOwned);CHKERRQ(ierr);
3324   ierr = PetscMalloc1(pEnd-pStart, &isExclusivelyOwned);CHKERRQ(ierr);
3325   for (i=0; i<pEnd-pStart; i++) {
3326     isNonExclusivelyOwned[i] = PETSC_FALSE;
3327     isExclusivelyOwned[i] = PETSC_FALSE;
3328     isLeaf[i] = PETSC_FALSE;
3329   }
3330 
3331   /* start by marking all the leafs */
3332   for (i=0; i<nleafs; i++) {
3333     isLeaf[ilocal[i]-pStart] = PETSC_TRUE;
3334   }
3335 
3336   /* for an owned point, we can figure out whether another processor sees it or
3337    * not by calculating its degree */
3338   ierr = PetscSFComputeDegreeBegin(sf, &degrees);CHKERRQ(ierr);
3339   ierr = PetscSFComputeDegreeEnd(sf, &degrees);CHKERRQ(ierr);
3340 
3341   numExclusivelyOwned = 0;
3342   numNonExclusivelyOwned = 0;
3343   for (i=0; i<pEnd-pStart; i++) {
3344     if (toBalance[i]) {
3345       if (degrees[i] > 0) {
3346         isNonExclusivelyOwned[i] = PETSC_TRUE;
3347         numNonExclusivelyOwned += 1;
3348       } else {
3349         if (!isLeaf[i]) {
3350           isExclusivelyOwned[i] = PETSC_TRUE;
3351           numExclusivelyOwned += 1;
3352         }
3353       }
3354     }
3355   }
3356 
3357   /* We are going to build a graph with one vertex per core representing the
3358    * exclusively owned points and then one vertex per nonExclusively owned
3359    * point. */
3360 
3361   ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
3362   ierr = PetscLayoutSetLocalSize(layout, 1 + numNonExclusivelyOwned);CHKERRQ(ierr);
3363   ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
3364   ierr = PetscLayoutGetRanges(layout, &cumSumVertices);CHKERRQ(ierr);
3365 
3366   ierr = PetscMalloc1(pEnd-pStart, &globalNumbersOfLocalOwnedVertices);CHKERRQ(ierr);
3367   for (i=0; i<pEnd-pStart; i++) {globalNumbersOfLocalOwnedVertices[i] = pStart - 1;}
3368   offset = cumSumVertices[rank];
3369   counter = 0;
3370   for (i=0; i<pEnd-pStart; i++) {
3371     if (toBalance[i]) {
3372       if (degrees[i] > 0) {
3373         globalNumbersOfLocalOwnedVertices[i] = counter + 1 + offset;
3374         counter++;
3375       }
3376     }
3377   }
3378 
3379   /* send the global numbers of vertices I own to the leafs so that they know to connect to it */
3380   ierr = PetscMalloc1(pEnd-pStart, &leafGlobalNumbers);CHKERRQ(ierr);
3381   ierr = PetscSFBcastBegin(sf, MPIU_INT, globalNumbersOfLocalOwnedVertices, leafGlobalNumbers);CHKERRQ(ierr);
3382   ierr = PetscSFBcastEnd(sf, MPIU_INT, globalNumbersOfLocalOwnedVertices, leafGlobalNumbers);CHKERRQ(ierr);
3383 
3384   /* Now start building the data structure for ParMETIS */
3385 
3386   ierr = MatCreate(comm, &Apre);CHKERRQ(ierr);
3387   ierr = MatSetType(Apre, MATPREALLOCATOR);CHKERRQ(ierr);
3388   ierr = MatSetSizes(Apre, 1+numNonExclusivelyOwned, 1+numNonExclusivelyOwned, cumSumVertices[size], cumSumVertices[size]);CHKERRQ(ierr);
3389   ierr = MatSetUp(Apre);CHKERRQ(ierr);
3390 
3391   for (i=0; i<pEnd-pStart; i++) {
3392     if (toBalance[i]) {
3393       idx = cumSumVertices[rank];
3394       if (isNonExclusivelyOwned[i]) jdx = globalNumbersOfLocalOwnedVertices[i];
3395       else if (isLeaf[i]) jdx = leafGlobalNumbers[i];
3396       else continue;
3397       ierr = MatSetValue(Apre, idx, jdx, 1, INSERT_VALUES);CHKERRQ(ierr);
3398       ierr = MatSetValue(Apre, jdx, idx, 1, INSERT_VALUES);CHKERRQ(ierr);
3399     }
3400   }
3401 
3402   ierr = MatAssemblyBegin(Apre, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3403   ierr = MatAssemblyEnd(Apre, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3404 
3405   ierr = MatCreate(comm, &A);CHKERRQ(ierr);
3406   ierr = MatSetType(A, MATMPIAIJ);CHKERRQ(ierr);
3407   ierr = MatSetSizes(A, 1+numNonExclusivelyOwned, 1+numNonExclusivelyOwned, cumSumVertices[size], cumSumVertices[size]);CHKERRQ(ierr);
3408   ierr = MatPreallocatorPreallocate(Apre, PETSC_FALSE, A);CHKERRQ(ierr);
3409   ierr = MatDestroy(&Apre);CHKERRQ(ierr);
3410 
3411   for (i=0; i<pEnd-pStart; i++) {
3412     if (toBalance[i]) {
3413       idx = cumSumVertices[rank];
3414       if (isNonExclusivelyOwned[i]) jdx = globalNumbersOfLocalOwnedVertices[i];
3415       else if (isLeaf[i]) jdx = leafGlobalNumbers[i];
3416       else continue;
3417       ierr = MatSetValue(A, idx, jdx, 1, INSERT_VALUES);CHKERRQ(ierr);
3418       ierr = MatSetValue(A, jdx, idx, 1, INSERT_VALUES);CHKERRQ(ierr);
3419     }
3420   }
3421 
3422   ierr = MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3423   ierr = MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3424   ierr = PetscFree(leafGlobalNumbers);CHKERRQ(ierr);
3425   ierr = PetscFree(globalNumbersOfLocalOwnedVertices);CHKERRQ(ierr);
3426 
3427   nparts = size;
3428   wgtflag = 2;
3429   numflag = 0;
3430   ncon = 2;
3431   real_t *tpwgts;
3432   ierr = PetscMalloc1(ncon * nparts, &tpwgts);CHKERRQ(ierr);
3433   for (i=0; i<ncon*nparts; i++) {
3434     tpwgts[i] = 1./(nparts);
3435   }
3436 
3437   ierr = PetscMalloc1(ncon, &ubvec);CHKERRQ(ierr);
3438   ubvec[0] = 1.01;
3439   ubvec[1] = 1.01;
3440   lenadjncy = 0;
3441   for (i=0; i<1+numNonExclusivelyOwned; i++) {
3442     PetscInt temp=0;
3443     ierr = MatGetRow(A, cumSumVertices[rank] + i, &temp, NULL, NULL);CHKERRQ(ierr);
3444     lenadjncy += temp;
3445     ierr = MatRestoreRow(A, cumSumVertices[rank] + i, &temp, NULL, NULL);CHKERRQ(ierr);
3446   }
3447   ierr = PetscMalloc1(lenadjncy, &adjncy);CHKERRQ(ierr);
3448   lenxadj = 2 + numNonExclusivelyOwned;
3449   ierr = PetscMalloc1(lenxadj, &xadj);CHKERRQ(ierr);
3450   xadj[0] = 0;
3451   counter = 0;
3452   for (i=0; i<1+numNonExclusivelyOwned; i++) {
3453     PetscInt        temp=0;
3454     const PetscInt *cols;
3455     ierr = MatGetRow(A, cumSumVertices[rank] + i, &temp, &cols, NULL);CHKERRQ(ierr);
3456     ierr = PetscArraycpy(&adjncy[counter], cols, temp);CHKERRQ(ierr);
3457     counter += temp;
3458     xadj[i+1] = counter;
3459     ierr = MatRestoreRow(A, cumSumVertices[rank] + i, &temp, &cols, NULL);CHKERRQ(ierr);
3460   }
3461 
3462   ierr = PetscMalloc1(cumSumVertices[rank+1]-cumSumVertices[rank], &part);CHKERRQ(ierr);
3463   ierr = PetscMalloc1(ncon*(1 + numNonExclusivelyOwned), &vtxwgt);CHKERRQ(ierr);
3464   vtxwgt[0] = numExclusivelyOwned;
3465   if (ncon>1) vtxwgt[1] = 1;
3466   for (i=0; i<numNonExclusivelyOwned; i++) {
3467     vtxwgt[ncon*(i+1)] = 1;
3468     if (ncon>1) vtxwgt[ncon*(i+1)+1] = 0;
3469   }
3470 
3471   if (viewer) {
3472     ierr = PetscViewerASCIIPrintf(viewer, "Attempt rebalancing of shared points of depth %D on interface of mesh distribution.\n", entityDepth);CHKERRQ(ierr);
3473     ierr = PetscViewerASCIIPrintf(viewer, "Size of generated auxiliary graph: %D\n", cumSumVertices[size]);CHKERRQ(ierr);
3474   }
3475   if (parallel) {
3476     ierr = PetscMalloc1(4, &options);CHKERRQ(ierr);
3477     options[0] = 1;
3478     options[1] = 0; /* Verbosity */
3479     options[2] = 0; /* Seed */
3480     options[3] = PARMETIS_PSR_COUPLED; /* Seed */
3481     if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using ParMETIS to partition graph.\n");CHKERRQ(ierr); }
3482     if (useInitialGuess) {
3483       if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using current distribution of points as initial guess.\n");CHKERRQ(ierr); }
3484       PetscStackPush("ParMETIS_V3_RefineKway");
3485       ierr = ParMETIS_V3_RefineKway((PetscInt*)cumSumVertices, xadj, adjncy, vtxwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgecut, part, &comm);
3486       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_RefineKway()");
3487       PetscStackPop;
3488     } else {
3489       PetscStackPush("ParMETIS_V3_PartKway");
3490       ierr = ParMETIS_V3_PartKway((PetscInt*)cumSumVertices, xadj, adjncy, vtxwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgecut, part, &comm);
3491       PetscStackPop;
3492       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
3493     }
3494     ierr = PetscFree(options);CHKERRQ(ierr);
3495   } else {
3496     if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using METIS to partition graph.\n");CHKERRQ(ierr); }
3497     Mat As;
3498     PetscInt numRows;
3499     PetscInt *partGlobal;
3500     ierr = MatCreateRedundantMatrix(A, size, MPI_COMM_NULL, MAT_INITIAL_MATRIX, &As);CHKERRQ(ierr);
3501 
3502     PetscInt *numExclusivelyOwnedAll;
3503     ierr = PetscMalloc1(size, &numExclusivelyOwnedAll);CHKERRQ(ierr);
3504     numExclusivelyOwnedAll[rank] = numExclusivelyOwned;
3505     ierr = MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,numExclusivelyOwnedAll,1,MPIU_INT,comm);CHKERRQ(ierr);
3506 
3507     ierr = MatGetSize(As, &numRows, NULL);CHKERRQ(ierr);
3508     ierr = PetscMalloc1(numRows, &partGlobal);CHKERRQ(ierr);
3509     if (!rank) {
3510       PetscInt *adjncy_g, *xadj_g, *vtxwgt_g;
3511       lenadjncy = 0;
3512 
3513       for (i=0; i<numRows; i++) {
3514         PetscInt temp=0;
3515         ierr = MatGetRow(As, i, &temp, NULL, NULL);CHKERRQ(ierr);
3516         lenadjncy += temp;
3517         ierr = MatRestoreRow(As, i, &temp, NULL, NULL);CHKERRQ(ierr);
3518       }
3519       ierr = PetscMalloc1(lenadjncy, &adjncy_g);CHKERRQ(ierr);
3520       lenxadj = 1 + numRows;
3521       ierr = PetscMalloc1(lenxadj, &xadj_g);CHKERRQ(ierr);
3522       xadj_g[0] = 0;
3523       counter = 0;
3524       for (i=0; i<numRows; i++) {
3525         PetscInt        temp=0;
3526         const PetscInt *cols;
3527         ierr = MatGetRow(As, i, &temp, &cols, NULL);CHKERRQ(ierr);
3528         ierr = PetscArraycpy(&adjncy_g[counter], cols, temp);CHKERRQ(ierr);
3529         counter += temp;
3530         xadj_g[i+1] = counter;
3531         ierr = MatRestoreRow(As, i, &temp, &cols, NULL);CHKERRQ(ierr);
3532       }
3533       ierr = PetscMalloc1(2*numRows, &vtxwgt_g);CHKERRQ(ierr);
3534       for (i=0; i<size; i++){
3535         vtxwgt_g[ncon*cumSumVertices[i]] = numExclusivelyOwnedAll[i];
3536         if (ncon>1) vtxwgt_g[ncon*cumSumVertices[i]+1] = 1;
3537         for (j=cumSumVertices[i]+1; j<cumSumVertices[i+1]; j++) {
3538           vtxwgt_g[ncon*j] = 1;
3539           if (ncon>1) vtxwgt_g[2*j+1] = 0;
3540         }
3541       }
3542       ierr = PetscMalloc1(64, &options);CHKERRQ(ierr);
3543       ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
3544       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
3545       options[METIS_OPTION_CONTIG] = 1;
3546       PetscStackPush("METIS_PartGraphKway");
3547       ierr = METIS_PartGraphKway(&numRows, &ncon, xadj_g, adjncy_g, vtxwgt_g, NULL, NULL, &nparts, tpwgts, ubvec, options, &edgecut, partGlobal);
3548       PetscStackPop;
3549       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
3550       ierr = PetscFree(options);CHKERRQ(ierr);
3551       ierr = PetscFree(xadj_g);CHKERRQ(ierr);
3552       ierr = PetscFree(adjncy_g);CHKERRQ(ierr);
3553       ierr = PetscFree(vtxwgt_g);CHKERRQ(ierr);
3554     }
3555     ierr = PetscFree(numExclusivelyOwnedAll);CHKERRQ(ierr);
3556 
3557     /* Now scatter the parts array. */
3558     {
3559       PetscMPIInt *counts, *mpiCumSumVertices;
3560       ierr = PetscMalloc1(size, &counts);CHKERRQ(ierr);
3561       ierr = PetscMalloc1(size+1, &mpiCumSumVertices);CHKERRQ(ierr);
3562       for(i=0; i<size; i++) {
3563         ierr = PetscMPIIntCast(cumSumVertices[i+1] - cumSumVertices[i], &(counts[i]));CHKERRQ(ierr);
3564       }
3565       for(i=0; i<=size; i++) {
3566         ierr = PetscMPIIntCast(cumSumVertices[i], &(mpiCumSumVertices[i]));CHKERRQ(ierr);
3567       }
3568       ierr = MPI_Scatterv(partGlobal, counts, mpiCumSumVertices, MPIU_INT, part, counts[rank], MPIU_INT, 0, comm);CHKERRQ(ierr);
3569       ierr = PetscFree(counts);CHKERRQ(ierr);
3570       ierr = PetscFree(mpiCumSumVertices);CHKERRQ(ierr);
3571     }
3572 
3573     ierr = PetscFree(partGlobal);CHKERRQ(ierr);
3574     ierr = MatDestroy(&As);CHKERRQ(ierr);
3575   }
3576 
3577   ierr = MatDestroy(&A);CHKERRQ(ierr);
3578   ierr = PetscFree(ubvec);CHKERRQ(ierr);
3579   ierr = PetscFree(tpwgts);CHKERRQ(ierr);
3580 
3581   /* Now rename the result so that the vertex resembling the exclusively owned points stays on the same rank */
3582 
3583   ierr = PetscMalloc1(size, &firstVertices);CHKERRQ(ierr);
3584   ierr = PetscMalloc1(size, &renumbering);CHKERRQ(ierr);
3585   firstVertices[rank] = part[0];
3586   ierr = MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,firstVertices,1,MPIU_INT,comm);CHKERRQ(ierr);
3587   for (i=0; i<size; i++) {
3588     renumbering[firstVertices[i]] = i;
3589   }
3590   for (i=0; i<cumSumVertices[rank+1]-cumSumVertices[rank]; i++) {
3591     part[i] = renumbering[part[i]];
3592   }
3593   /* Check if the renumbering worked (this can fail when ParMETIS gives fewer partitions than there are processes) */
3594   failed = (PetscInt)(part[0] != rank);
3595   ierr = MPI_Allreduce(&failed, &failedGlobal, 1, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
3596 
3597   ierr = PetscFree(firstVertices);CHKERRQ(ierr);
3598   ierr = PetscFree(renumbering);CHKERRQ(ierr);
3599 
3600   if (failedGlobal > 0) {
3601     ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
3602     ierr = PetscFree(xadj);CHKERRQ(ierr);
3603     ierr = PetscFree(adjncy);CHKERRQ(ierr);
3604     ierr = PetscFree(vtxwgt);CHKERRQ(ierr);
3605     ierr = PetscFree(toBalance);CHKERRQ(ierr);
3606     ierr = PetscFree(isLeaf);CHKERRQ(ierr);
3607     ierr = PetscFree(isNonExclusivelyOwned);CHKERRQ(ierr);
3608     ierr = PetscFree(isExclusivelyOwned);CHKERRQ(ierr);
3609     ierr = PetscFree(part);CHKERRQ(ierr);
3610     if (viewer) {
3611       ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
3612       ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
3613     }
3614     ierr = PetscLogEventEnd(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
3615     PetscFunctionReturn(0);
3616   }
3617 
3618   /*Let's check how well we did distributing points*/
3619   if (viewer) {
3620     ierr = PetscViewerASCIIPrintf(viewer, "Comparing number of owned entities of depth %D on each process before rebalancing, after rebalancing, and after consistency checks.\n", entityDepth);CHKERRQ(ierr);
3621     ierr = PetscViewerASCIIPrintf(viewer, "Initial.     ");CHKERRQ(ierr);
3622     ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, NULL, viewer);CHKERRQ(ierr);
3623     ierr = PetscViewerASCIIPrintf(viewer, "Rebalanced.  ");CHKERRQ(ierr);
3624     ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, part, viewer);CHKERRQ(ierr);
3625   }
3626 
3627   /* Now check that every vertex is owned by a process that it is actually connected to. */
3628   for (i=1; i<=numNonExclusivelyOwned; i++) {
3629     PetscInt loc = 0;
3630     ierr = PetscFindInt(cumSumVertices[part[i]], xadj[i+1]-xadj[i], &adjncy[xadj[i]], &loc);CHKERRQ(ierr);
3631     /* If not, then just set the owner to the original owner (hopefully a rare event, it means that a vertex has been isolated) */
3632     if (loc<0) {
3633       part[i] = rank;
3634     }
3635   }
3636 
3637   /* Let's see how significant the influences of the previous fixing up step was.*/
3638   if (viewer) {
3639     ierr = PetscViewerASCIIPrintf(viewer, "After.       ");CHKERRQ(ierr);
3640     ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, part, viewer);CHKERRQ(ierr);
3641   }
3642 
3643   ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
3644   ierr = PetscFree(xadj);CHKERRQ(ierr);
3645   ierr = PetscFree(adjncy);CHKERRQ(ierr);
3646   ierr = PetscFree(vtxwgt);CHKERRQ(ierr);
3647 
3648   /* Almost done, now rewrite the SF to reflect the new ownership. */
3649   {
3650     PetscInt *pointsToRewrite;
3651     ierr = PetscMalloc1(numNonExclusivelyOwned, &pointsToRewrite);CHKERRQ(ierr);
3652     counter = 0;
3653     for(i=0; i<pEnd-pStart; i++) {
3654       if (toBalance[i]) {
3655         if (isNonExclusivelyOwned[i]) {
3656           pointsToRewrite[counter] = i + pStart;
3657           counter++;
3658         }
3659       }
3660     }
3661     ierr = DMPlexRewriteSF(dm, numNonExclusivelyOwned, pointsToRewrite, part+1, degrees);CHKERRQ(ierr);
3662     ierr = PetscFree(pointsToRewrite);CHKERRQ(ierr);
3663   }
3664 
3665   ierr = PetscFree(toBalance);CHKERRQ(ierr);
3666   ierr = PetscFree(isLeaf);CHKERRQ(ierr);
3667   ierr = PetscFree(isNonExclusivelyOwned);CHKERRQ(ierr);
3668   ierr = PetscFree(isExclusivelyOwned);CHKERRQ(ierr);
3669   ierr = PetscFree(part);CHKERRQ(ierr);
3670   if (viewer) {
3671     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
3672     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
3673   }
3674   if (success) *success = PETSC_TRUE;
3675   ierr = PetscLogEventEnd(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
3676 #else
3677   SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
3678 #endif
3679   PetscFunctionReturn(0);
3680 }
3681