xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 0e03b746557e2551025fde0294144c0532d12f68)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/hashseti.h>
3 
/* Class id identifying PetscPartitioner objects (0 until the class is registered) */
PetscClassId PETSCPARTITIONER_CLASSID = 0;

/* Global registry of PetscPartitioner implementations, filled by PetscPartitionerRegister() */
PetscFunctionList PetscPartitionerList              = NULL;
/* Tracks whether the built-in implementations have been registered; presumably set by
   PetscPartitionerRegisterAll() (defined elsewhere) -- confirm */
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
8 
9 PetscBool ChacoPartitionercite = PETSC_FALSE;
10 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
11                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
12                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
13                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
14                                "  isbn      = {0-89791-816-9},\n"
15                                "  pages     = {28},\n"
16                                "  doi       = {https://doi.acm.org/10.1145/224170.224228},\n"
17                                "  publisher = {ACM Press},\n"
18                                "  address   = {New York},\n"
19                                "  year      = {1995}\n}\n";
20 
/* BibTeX citation for the ParMetis partitioner; the flag records whether it has been emitted */
PetscBool ParMetisPartitionercite = PETSC_FALSE;
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";
29 
/* BibTeX citation for the PT-Scotch partitioner; the flag records whether it has been emitted */
PetscBool PTScotchPartitionercite = PETSC_FALSE;
const char PTScotchPartitionerCitation[] =
  "@article{PTSCOTCH,\n"
  "  author  = {C. Chevalier and F. Pellegrini},\n"
  "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
  "  journal = {Parallel Computing},\n"
  "  volume  = {34},\n"
  "  number  = {6},\n"
  "  pages   = {318--331},\n"
  "  year    = {2008},\n"
  "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
  "}\n";
42 
43 
44 PETSC_STATIC_INLINE PetscInt DMPlex_GlobalID(PetscInt point) { return point >= 0 ? point : -(point+1); }
45 
/* Build the cell-cell adjacency graph for the partitioner in CSR form by walking the
   Plex adjacency directly (no matrix product).

   Input:
   + dm     - the mesh
   - height - stratum height whose points become graph vertices (0 for cells)

   Output (each optional, pass NULL to skip):
   + numVertices     - number of locally owned graph vertices
   . offsets         - CSR row offsets, length numVertices+1 (caller frees)
   . adjacency       - CSR column entries in global cell numbering (caller frees)
   - globalNumbering - IS mapping local cells to global IDs; negative entries mark
                       cells owned by another process (caller destroys)
*/
static PetscErrorCode DMPlexCreatePartitionerGraph_Native(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
{
  PetscInt       dim, depth, p, pStart, pEnd, a, adjSize, idx, size;
  PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
  IS             cellNumbering;
  const PetscInt *cellNum;
  PetscBool      useCone, useClosure;
  PetscSection   section;
  PetscSegBuffer adjBuffer;
  PetscSF        sfPoint;
  PetscInt       *adjCells = NULL, *remoteCells = NULL;
  const PetscInt *local;
  PetscInt       nroots, nleaves, l;
  PetscMPIInt    rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  if (dim != depth) {
    /* We do not handle the uninterpolated case here */
    ierr = DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);CHKERRQ(ierr);
    /* DMPlexCreateNeighborCSR does not make a numbering */
    if (globalNumbering) {ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);CHKERRQ(ierr);}
    /* Different behavior for empty graphs */
    if (!*numVertices) {
      ierr = PetscMalloc1(1, offsets);CHKERRQ(ierr);
      (*offsets)[0] = 0;
    }
    /* Broken in parallel */
    if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported");
    PetscFunctionReturn(0);
  }
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
  /* Build adjacency graph via a section/segbuffer */
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
  /* Always use FVM adjacency to create partitioner graph; the user's setting is restored below */
  ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
  ierr = DMSetBasicAdjacency(dm, PETSC_TRUE, PETSC_FALSE);CHKERRQ(ierr);
  ierr = DMPlexCreateNumbering_Plex(dm, pStart, pEnd, 0, NULL, sfPoint, &cellNumbering);CHKERRQ(ierr);
  if (globalNumbering) {
    /* Hand the caller an extra reference to the same numbering used internally */
    ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
    *globalNumbering = cellNumbering;
  }
  ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  /* For all boundary faces (including faces adjacent to a ghost cell), record the local cell in adjCells
     Broadcast adjCells to remoteCells (to get cells from roots) and Reduce adjCells to remoteCells (to get cells from leaves)
   */
  ierr = PetscSFGetGraph(sfPoint, &nroots, &nleaves, &local, NULL);CHKERRQ(ierr);
  if (nroots >= 0) {
    PetscInt fStart, fEnd, f;

    ierr = PetscCalloc2(nroots, &adjCells, nroots, &remoteCells);CHKERRQ(ierr);
    ierr = DMPlexGetHeightStratum(dm, height+1, &fStart, &fEnd);CHKERRQ(ierr);
    /* -3 marks points with no adjacent cell recorded yet */
    for (l = 0; l < nroots; ++l) adjCells[l] = -3;
    for (f = fStart; f < fEnd; ++f) {
      const PetscInt *support;
      PetscInt        supportSize;

      ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
      ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
      /* Boundary face: record its only cell */
      if (supportSize == 1) adjCells[f] = DMPlex_GlobalID(cellNum[support[0]]);
      else if (supportSize == 2) {
        /* Interior face: if one support cell is a leaf of the point SF (shared),
           record the OTHER cell so the remote side learns its neighbor */
        ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = DMPlex_GlobalID(cellNum[support[1]]);
        ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = DMPlex_GlobalID(cellNum[support[0]]);
      }
      /* Handle non-conforming meshes */
      if (supportSize > 2) {
        PetscInt        numChildren, i;
        const PetscInt *children;

        /* Apply the same recording rules to each child face living in this stratum */
        ierr = DMPlexGetTreeChildren(dm, f, &numChildren, &children);CHKERRQ(ierr);
        for (i = 0; i < numChildren; ++i) {
          const PetscInt child = children[i];
          if (fStart <= child && child < fEnd) {
            ierr = DMPlexGetSupport(dm, child, &support);CHKERRQ(ierr);
            ierr = DMPlexGetSupportSize(dm, child, &supportSize);CHKERRQ(ierr);
            if (supportSize == 1) adjCells[child] = DMPlex_GlobalID(cellNum[support[0]]);
            else if (supportSize == 2) {
              ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr);
              if (p >= 0) adjCells[child] = DMPlex_GlobalID(cellNum[support[1]]);
              ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr);
              if (p >= 0) adjCells[child] = DMPlex_GlobalID(cellNum[support[0]]);
            }
          }
        }
      }
    }
    for (l = 0; l < nroots; ++l) remoteCells[l] = -1;
    /* Bcast pushes root values to leaves; the MAX-reduce then pulls leaf values back to
       roots, so afterwards remoteCells holds the best-known remote neighbor at each point */
    ierr = PetscSFBcastBegin(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
  }
  /* Combine local and global adjacencies */
  for (*numVertices = 0, p = pStart; p < pEnd; p++) {
    /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    /* Add remote cells */
    if (remoteCells) {
      const PetscInt gp = DMPlex_GlobalID(cellNum[p]);
      PetscInt       coneSize, numChildren, c, i;
      const PetscInt *cone, *children;

      ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
      ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
      for (c = 0; c < coneSize; ++c) {
        const PetscInt point = cone[c];
        /* A valid remoteCells entry that is not this cell itself is an off-process neighbor */
        if (remoteCells[point] >= 0 && remoteCells[point] != gp) {
          PetscInt *PETSC_RESTRICT pBuf;
          ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
          ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
          *pBuf = remoteCells[point];
        }
        /* Handle non-conforming meshes */
        ierr = DMPlexGetTreeChildren(dm, point, &numChildren, &children);CHKERRQ(ierr);
        for (i = 0; i < numChildren; ++i) {
          const PetscInt child = children[i];
          if (remoteCells[child] >= 0 && remoteCells[child] != gp) {
            PetscInt *PETSC_RESTRICT pBuf;
            ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
            ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
            *pBuf = remoteCells[child];
          }
        }
      }
    }
    /* Add local cells */
    adjSize = PETSC_DETERMINE;
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      const PetscInt point = adj[a];
      /* Keep only other cells in the same stratum (no self-edges) */
      if (point != p && pStart <= point && point < pEnd) {
        PetscInt *PETSC_RESTRICT pBuf;
        ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
        ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
        *pBuf = DMPlex_GlobalID(cellNum[point]);
      }
    }
    (*numVertices)++;
  }
  ierr = PetscFree(adj);CHKERRQ(ierr);
  ierr = PetscFree2(adjCells, remoteCells);CHKERRQ(ierr);
  /* Restore the user's adjacency setting */
  ierr = DMSetBasicAdjacency(dm, useCone, useClosure);CHKERRQ(ierr);

  /* Derive CSR graph from section/segbuffer */
  ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
  for (idx = 0, p = pStart; p < pEnd; p++) {
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
  }
  vOffsets[*numVertices] = size;
  ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);

  if (nroots >= 0) {
    /* Filter out duplicate edges using section/segbuffer */
    ierr = PetscSectionReset(section);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(section, 0, *numVertices);CHKERRQ(ierr);
    for (p = 0; p < *numVertices; p++) {
      PetscInt start = vOffsets[p], end = vOffsets[p+1];
      PetscInt numEdges = end-start, *PETSC_RESTRICT edges;
      /* Sort-and-unique each row in place, then append the compacted row to the (now empty) segbuffer */
      ierr = PetscSortRemoveDupsInt(&numEdges, &graph[start]);CHKERRQ(ierr);
      ierr = PetscSectionSetDof(section, p, numEdges);CHKERRQ(ierr);
      ierr = PetscSegBufferGetInts(adjBuffer, numEdges, &edges);CHKERRQ(ierr);
      ierr = PetscArraycpy(edges, &graph[start], numEdges);CHKERRQ(ierr);
    }
    ierr = PetscFree(vOffsets);CHKERRQ(ierr);
    ierr = PetscFree(graph);CHKERRQ(ierr);
    /* Derive CSR graph from section/segbuffer */
    ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
    ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
    ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
    for (idx = 0, p = 0; p < *numVertices; p++) {
      ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
    }
    vOffsets[*numVertices] = size;
    ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
  } else {
    /* Sort adjacencies (not strictly necessary) */
    for (p = 0; p < *numVertices; p++) {
      PetscInt start = vOffsets[p], end = vOffsets[p+1];
      ierr = PetscSortInt(end-start, &graph[start]);CHKERRQ(ierr);
    }
  }

  /* Ownership of vOffsets/graph passes to the caller when requested.
     NOTE(review): if offsets/adjacency are NULL the arrays are not freed here -- confirm
     that callers always request both. */
  if (offsets) *offsets = vOffsets;
  if (adjacency) *adjacency = graph;

  /* Cleanup */
  ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
  ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
249 
/* Build the cell-cell adjacency graph for the partitioner in CSR form by assembling a
   face-by-cell connectivity matrix F and computing F^T * F: two cells are adjacent iff
   they share a face. Same interface and output contract as the _Native variant. */
static PetscErrorCode DMPlexCreatePartitionerGraph_ViaMat(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
{
  Mat            conn, CSR;
  IS             fis, cis, cis_own;
  PetscSF        sfPoint;
  const PetscInt *rows, *cols, *ii, *jj;
  PetscInt       *idxs,*idxs2;
  PetscInt       dim, depth, floc, cloc, i, M, N, c, lm, m, cStart, cEnd, fStart, fEnd;
  PetscMPIInt    rank;
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  if (dim != depth) {
    /* We do not handle the uninterpolated case here */
    ierr = DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);CHKERRQ(ierr);
    /* DMPlexCreateNeighborCSR does not make a numbering */
    if (globalNumbering) {ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);CHKERRQ(ierr);}
    /* Different behavior for empty graphs */
    if (!*numVertices) {
      ierr = PetscMalloc1(1, offsets);CHKERRQ(ierr);
      (*offsets)[0] = 0;
    }
    /* Broken in parallel */
    if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported");
    PetscFunctionReturn(0);
  }
  /* Interpolated and parallel case */

  /* numbering: global IDs for cells (cis) and faces (fis); negative entries mark ghosts */
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &cStart, &cEnd);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height+1, &fStart, &fEnd);CHKERRQ(ierr);
  ierr = DMPlexCreateNumbering_Plex(dm, cStart, cEnd, 0, &N, sfPoint, &cis);CHKERRQ(ierr);
  ierr = DMPlexCreateNumbering_Plex(dm, fStart, fEnd, 0, &M, sfPoint, &fis);CHKERRQ(ierr);
  if (globalNumbering) {
    ierr = ISDuplicate(cis, globalNumbering);CHKERRQ(ierr);
  }

  /* get positive global ids and local sizes for facets and cells */
  ierr = ISGetLocalSize(fis, &m);CHKERRQ(ierr);
  ierr = ISGetIndices(fis, &rows);CHKERRQ(ierr);
  ierr = PetscMalloc1(m, &idxs);CHKERRQ(ierr);
  for (i = 0, floc = 0; i < m; i++) {
    const PetscInt p = rows[i];

    if (p < 0) {
      /* ghost face: decode -(id+1) back to the nonnegative global id */
      idxs[i] = -(p+1);
    } else {
      idxs[i] = p;
      floc   += 1; /* count locally owned faces */
    }
  }
  ierr = ISRestoreIndices(fis, &rows);CHKERRQ(ierr);
  ierr = ISDestroy(&fis);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF, m, idxs, PETSC_OWN_POINTER, &fis);CHKERRQ(ierr);

  ierr = ISGetLocalSize(cis, &m);CHKERRQ(ierr);
  ierr = ISGetIndices(cis, &cols);CHKERRQ(ierr);
  ierr = PetscMalloc1(m, &idxs);CHKERRQ(ierr);
  ierr = PetscMalloc1(m, &idxs2);CHKERRQ(ierr);
  for (i = 0, cloc = 0; i < m; i++) {
    const PetscInt p = cols[i];

    if (p < 0) {
      idxs[i] = -(p+1);
    } else {
      idxs[i]       = p;
      idxs2[cloc++] = p; /* idxs2 keeps only the owned cells' global ids */
    }
  }
  ierr = ISRestoreIndices(cis, &cols);CHKERRQ(ierr);
  ierr = ISDestroy(&cis);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), m, idxs, PETSC_OWN_POINTER, &cis);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), cloc, idxs2, PETSC_OWN_POINTER, &cis_own);CHKERRQ(ierr);

  /* Create matrix to hold F-C connectivity (MatMatTranspose Mult not supported for MPIAIJ) */
  ierr = MatCreate(PetscObjectComm((PetscObject)dm), &conn);CHKERRQ(ierr);
  ierr = MatSetSizes(conn, floc, cloc, M, N);CHKERRQ(ierr);
  ierr = MatSetType(conn, MATMPIAIJ);CHKERRQ(ierr);
  ierr = DMPlexGetMaxSizes(dm, NULL, &lm);CHKERRQ(ierr);
  /* NOTE(review): summing the per-rank max cone size looks like an over-estimate for
     preallocation (MPI_MAX would seem sufficient) -- confirm */
  ierr = MPI_Allreduce(&lm, &m, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject) dm));CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(conn, m, NULL, m, NULL);CHKERRQ(ierr);

  /* Assemble matrix: entry (f, c) = 1 iff face f bounds cell c */
  ierr = ISGetIndices(fis, &rows);CHKERRQ(ierr);
  ierr = ISGetIndices(cis, &cols);CHKERRQ(ierr);
  for (c = cStart; c < cEnd; c++) {
    const PetscInt *cone;
    PetscInt        coneSize, row, col, f;

    col  = cols[c-cStart];
    ierr = DMPlexGetCone(dm, c, &cone);CHKERRQ(ierr);
    ierr = DMPlexGetConeSize(dm, c, &coneSize);CHKERRQ(ierr);
    for (f = 0; f < coneSize; f++) {
      const PetscScalar v = 1.0;
      const PetscInt *children;
      PetscInt        numChildren, ch;

      row  = rows[cone[f]-fStart];
      ierr = MatSetValues(conn, 1, &row, 1, &col, &v, INSERT_VALUES);CHKERRQ(ierr);

      /* non-conforming meshes: also connect the cell to child faces */
      ierr = DMPlexGetTreeChildren(dm, cone[f], &numChildren, &children);CHKERRQ(ierr);
      for (ch = 0; ch < numChildren; ch++) {
        const PetscInt child = children[ch];

        if (child < fStart || child >= fEnd) continue;
        row  = rows[child-fStart];
        ierr = MatSetValues(conn, 1, &row, 1, &col, &v, INSERT_VALUES);CHKERRQ(ierr);
      }
    }
  }
  ierr = ISRestoreIndices(fis, &rows);CHKERRQ(ierr);
  ierr = ISRestoreIndices(cis, &cols);CHKERRQ(ierr);
  ierr = ISDestroy(&fis);CHKERRQ(ierr);
  ierr = ISDestroy(&cis);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(conn, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(conn, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  /* Get parallel CSR by doing conn^T * conn */
  ierr = MatTransposeMatMult(conn, conn, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &CSR);CHKERRQ(ierr);
  ierr = MatDestroy(&conn);CHKERRQ(ierr);

  /* extract local part of the CSR */
  ierr = MatMPIAIJGetLocalMat(CSR, MAT_INITIAL_MATRIX, &conn);CHKERRQ(ierr);
  ierr = MatDestroy(&CSR);CHKERRQ(ierr);
  ierr = MatGetRowIJ(conn, 0, PETSC_FALSE, PETSC_FALSE, &m, &ii, &jj, &flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "No IJ format");

  /* get back requested output */
  if (numVertices) *numVertices = m;
  if (offsets) {
    ierr = PetscCalloc1(m+1, &idxs);CHKERRQ(ierr);
    /* each row has exactly one diagonal entry; subtracting i drops it from the counts */
    for (i = 1; i < m+1; i++) idxs[i] = ii[i] - i; /* ParMetis does not like self-connectivity */
    *offsets = idxs;
  }
  if (adjacency) {
    ierr = PetscMalloc1(ii[m] - m, &idxs);CHKERRQ(ierr);
    ierr = ISGetIndices(cis_own, &rows);CHKERRQ(ierr);
    for (i = 0, c = 0; i < m; i++) {
      PetscInt j, g = rows[i];

      for (j = ii[i]; j < ii[i+1]; j++) {
        if (jj[j] == g) continue; /* again, self-connectivity */
        idxs[c++] = jj[j];
      }
    }
    if (c != ii[m] - m) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected %D != %D",c,ii[m]-m);
    ierr = ISRestoreIndices(cis_own, &rows);CHKERRQ(ierr);
    *adjacency = idxs;
  }

  /* cleanup */
  ierr = ISDestroy(&cis_own);CHKERRQ(ierr);
  ierr = MatRestoreRowIJ(conn, 0, PETSC_FALSE, PETSC_FALSE, &m, &ii, &jj, &flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "No IJ format");
  ierr = MatDestroy(&conn);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
413 
414 /*@C
415   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
416 
417   Input Parameters:
418 + dm      - The mesh DM dm
419 - height  - Height of the strata from which to construct the graph
420 
421   Output Parameter:
422 + numVertices     - Number of vertices in the graph
423 . offsets         - Point offsets in the graph
424 . adjacency       - Point connectivity in the graph
425 - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.
426 
427   The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
428   representation on the mesh. If requested, globalNumbering needs to be destroyed by the caller; offsets and adjacency need to be freed with PetscFree().
429 
430   Level: developer
431 
432 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMSetAdjacency()
433 @*/
434 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
435 {
436   PetscErrorCode ierr;
437   PetscBool      usemat = PETSC_FALSE;
438 
439   PetscFunctionBegin;
440   ierr = PetscOptionsGetBool(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-dm_plex_csr_via_mat", &usemat, NULL);CHKERRQ(ierr);
441   if (usemat) {
442     ierr = DMPlexCreatePartitionerGraph_ViaMat(dm, height, numVertices, offsets, adjacency, globalNumbering);CHKERRQ(ierr);
443   } else {
444     ierr = DMPlexCreatePartitionerGraph_Native(dm, height, numVertices, offsets, adjacency, globalNumbering);CHKERRQ(ierr);
445   }
446   PetscFunctionReturn(0);
447 }
448 
449 /*@C
450   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
451 
452   Collective on DM
453 
454   Input Arguments:
455 + dm - The DMPlex
456 - cellHeight - The height of mesh points to treat as cells (default should be 0)
457 
458   Output Arguments:
459 + numVertices - The number of local vertices in the graph, or cells in the mesh.
460 . offsets     - The offset to the adjacency list for each cell
461 - adjacency   - The adjacency list for all cells
462 
463   Note: This is suitable for input to a mesh partitioner like ParMetis.
464 
465   Level: advanced
466 
467 .seealso: DMPlexCreate()
468 @*/
469 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
470 {
471   const PetscInt maxFaceCases = 30;
472   PetscInt       numFaceCases = 0;
473   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
474   PetscInt      *off, *adj;
475   PetscInt      *neighborCells = NULL;
476   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
477   PetscErrorCode ierr;
478 
479   PetscFunctionBegin;
480   /* For parallel partitioning, I think you have to communicate supports */
481   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
482   cellDim = dim - cellHeight;
483   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
484   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
485   if (cEnd - cStart == 0) {
486     if (numVertices) *numVertices = 0;
487     if (offsets)   *offsets   = NULL;
488     if (adjacency) *adjacency = NULL;
489     PetscFunctionReturn(0);
490   }
491   numCells  = cEnd - cStart;
492   faceDepth = depth - cellHeight;
493   if (dim == depth) {
494     PetscInt f, fStart, fEnd;
495 
496     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
497     /* Count neighboring cells */
498     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
499     for (f = fStart; f < fEnd; ++f) {
500       const PetscInt *support;
501       PetscInt        supportSize;
502       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
503       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
504       if (supportSize == 2) {
505         PetscInt numChildren;
506 
507         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
508         if (!numChildren) {
509           ++off[support[0]-cStart+1];
510           ++off[support[1]-cStart+1];
511         }
512       }
513     }
514     /* Prefix sum */
515     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
516     if (adjacency) {
517       PetscInt *tmp;
518 
519       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
520       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
521       ierr = PetscArraycpy(tmp, off, numCells+1);CHKERRQ(ierr);
522       /* Get neighboring cells */
523       for (f = fStart; f < fEnd; ++f) {
524         const PetscInt *support;
525         PetscInt        supportSize;
526         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
527         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
528         if (supportSize == 2) {
529           PetscInt numChildren;
530 
531           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
532           if (!numChildren) {
533             adj[tmp[support[0]-cStart]++] = support[1];
534             adj[tmp[support[1]-cStart]++] = support[0];
535           }
536         }
537       }
538 #if defined(PETSC_USE_DEBUG)
539       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
540 #endif
541       ierr = PetscFree(tmp);CHKERRQ(ierr);
542     }
543     if (numVertices) *numVertices = numCells;
544     if (offsets)   *offsets   = off;
545     if (adjacency) *adjacency = adj;
546     PetscFunctionReturn(0);
547   }
548   /* Setup face recognition */
549   if (faceDepth == 1) {
550     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
551 
552     for (c = cStart; c < cEnd; ++c) {
553       PetscInt corners;
554 
555       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
556       if (!cornersSeen[corners]) {
557         PetscInt nFV;
558 
559         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
560         cornersSeen[corners] = 1;
561 
562         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
563 
564         numFaceVertices[numFaceCases++] = nFV;
565       }
566     }
567   }
568   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
569   /* Count neighboring cells */
570   for (cell = cStart; cell < cEnd; ++cell) {
571     PetscInt numNeighbors = PETSC_DETERMINE, n;
572 
573     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
574     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
575     for (n = 0; n < numNeighbors; ++n) {
576       PetscInt        cellPair[2];
577       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
578       PetscInt        meetSize = 0;
579       const PetscInt *meet    = NULL;
580 
581       cellPair[0] = cell; cellPair[1] = neighborCells[n];
582       if (cellPair[0] == cellPair[1]) continue;
583       if (!found) {
584         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
585         if (meetSize) {
586           PetscInt f;
587 
588           for (f = 0; f < numFaceCases; ++f) {
589             if (numFaceVertices[f] == meetSize) {
590               found = PETSC_TRUE;
591               break;
592             }
593           }
594         }
595         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
596       }
597       if (found) ++off[cell-cStart+1];
598     }
599   }
600   /* Prefix sum */
601   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
602 
603   if (adjacency) {
604     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
605     /* Get neighboring cells */
606     for (cell = cStart; cell < cEnd; ++cell) {
607       PetscInt numNeighbors = PETSC_DETERMINE, n;
608       PetscInt cellOffset   = 0;
609 
610       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
611       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
612       for (n = 0; n < numNeighbors; ++n) {
613         PetscInt        cellPair[2];
614         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
615         PetscInt        meetSize = 0;
616         const PetscInt *meet    = NULL;
617 
618         cellPair[0] = cell; cellPair[1] = neighborCells[n];
619         if (cellPair[0] == cellPair[1]) continue;
620         if (!found) {
621           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
622           if (meetSize) {
623             PetscInt f;
624 
625             for (f = 0; f < numFaceCases; ++f) {
626               if (numFaceVertices[f] == meetSize) {
627                 found = PETSC_TRUE;
628                 break;
629               }
630             }
631           }
632           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
633         }
634         if (found) {
635           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
636           ++cellOffset;
637         }
638       }
639     }
640   }
641   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
642   if (numVertices) *numVertices = numCells;
643   if (offsets)   *offsets   = off;
644   if (adjacency) *adjacency = adj;
645   PetscFunctionReturn(0);
646 }
647 
648 /*@C
649   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
650 
651   Not Collective
652 
653   Input Parameters:
654 + name        - The name of a new user-defined creation routine
655 - create_func - The creation routine itself
656 
657   Notes:
658   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
659 
660   Sample usage:
661 .vb
662     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
663 .ve
664 
665   Then, your PetscPartitioner type can be chosen with the procedural interface via
666 .vb
667     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
668     PetscPartitionerSetType(PetscPartitioner, "my_part");
669 .ve
670    or at runtime via the option
671 .vb
672     -petscpartitioner_type my_part
673 .ve
674 
675   Level: advanced
676 
677 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
678 
679 @*/
680 PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
681 {
682   PetscErrorCode ierr;
683 
684   PetscFunctionBegin;
685   ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
686   PetscFunctionReturn(0);
687 }
688 
/*@C
  PetscPartitionerSetType - Builds a particular PetscPartitioner

  Collective on PetscPartitioner

  Input Parameters:
+ part - The PetscPartitioner object
- name - The kind of partitioner

  Options Database Key:
. -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types

  Level: intermediate

.seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
{
  PetscErrorCode (*r)(PetscPartitioner);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
  /* Nothing to do if the object already has the requested type */
  if (match) PetscFunctionReturn(0);

  /* Look up the registered constructor for the requested type */
  ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);

  /* Tear down the previous implementation before installing the new one */
  if (part->ops->destroy) {
    ierr = (*part->ops->destroy)(part);CHKERRQ(ierr);
  }
  part->noGraph = PETSC_FALSE;
  /* Clear the old function table; the constructor below repopulates it */
  ierr = PetscMemzero(part->ops, sizeof(*part->ops));CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
  ierr = (*r)(part);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
728 
/*@C
  PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.

  Not Collective

  Input Parameter:
. part - The PetscPartitioner

  Output Parameter:
. name - The PetscPartitioner type name

  Level: intermediate

.seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  /* The returned string is owned by the object; the caller must not free it */
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
752 
/*@C
   PetscPartitionerViewFromOptions - View from Options

   Collective on PetscPartitioner

   Input Parameters:
+  A - the PetscPartitioner object
.  obj - Optional object
-  name - command line option

   Level: intermediate

.seealso:  PetscPartitionerView(), PetscObjectViewFromOptions()
@*/
PetscErrorCode PetscPartitionerViewFromOptions(PetscPartitioner A,PetscObject obj,const char name[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,PETSCPARTITIONER_CLASSID,1);
  /* Delegate to the generic options-driven viewing of a PetscObject */
  ierr = PetscObjectViewFromOptions((PetscObject)A,obj,name);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
775 
776 /*@C
777   PetscPartitionerView - Views a PetscPartitioner
778 
779   Collective on PetscPartitioner
780 
781   Input Parameter:
782 + part - the PetscPartitioner object to view
783 - v    - the viewer
784 
785   Level: developer
786 
787 .seealso: PetscPartitionerDestroy()
788 @*/
789 PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
790 {
791   PetscMPIInt    size;
792   PetscBool      isascii;
793   PetscErrorCode ierr;
794 
795   PetscFunctionBegin;
796   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
797   if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
798   ierr = PetscObjectTypeCompare((PetscObject) v, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
799   if (isascii) {
800     ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
801     ierr = PetscViewerASCIIPrintf(v, "Graph Partitioner: %d MPI Process%s\n", size, size > 1 ? "es" : "");CHKERRQ(ierr);
802     ierr = PetscViewerASCIIPrintf(v, "  type: %s\n", part->hdr.type_name);CHKERRQ(ierr);
803     ierr = PetscViewerASCIIPrintf(v, "  edge cut: %D\n", part->edgeCut);CHKERRQ(ierr);
804     ierr = PetscViewerASCIIPrintf(v, "  balance: %.2g\n", part->balance);CHKERRQ(ierr);
805     ierr = PetscViewerASCIIPrintf(v, "  use vertex weights: %d\n", part->usevwgt);CHKERRQ(ierr);
806   }
807   if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
808   PetscFunctionReturn(0);
809 }
810 
811 static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType)
812 {
813   PetscFunctionBegin;
814   if (!currentType) {
815 #if defined(PETSC_HAVE_PARMETIS)
816     *defaultType = PETSCPARTITIONERPARMETIS;
817 #elif defined(PETSC_HAVE_PTSCOTCH)
818     *defaultType = PETSCPARTITIONERPTSCOTCH;
819 #elif defined(PETSC_HAVE_CHACO)
820     *defaultType = PETSCPARTITIONERCHACO;
821 #else
822     *defaultType = PETSCPARTITIONERSIMPLE;
823 #endif
824   } else {
825     *defaultType = currentType;
826   }
827   PetscFunctionReturn(0);
828 }
829 
830 /*@
831   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
832 
833   Collective on PetscPartitioner
834 
835   Input Parameter:
836 . part - the PetscPartitioner object to set options for
837 
838   Options Database Keys:
839 +  -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
840 .  -petscpartitioner_use_vertex_weights - Uses weights associated with the graph vertices
841 -  -petscpartitioner_view_graph - View the graph each time PetscPartitionerPartition is called. Viewer can be customized, see PetscOptionsGetViewer()
842 
843   Level: developer
844 
845 .seealso: PetscPartitionerView(), PetscPartitionerSetType(), PetscPartitionerPartition()
846 @*/
847 PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
848 {
849   const char    *defaultType;
850   char           name[256];
851   PetscBool      flg;
852   PetscErrorCode ierr;
853 
854   PetscFunctionBegin;
855   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
856   ierr = PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);CHKERRQ(ierr);
857   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
858   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, sizeof(name), &flg);CHKERRQ(ierr);
859   if (flg) {
860     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
861   } else if (!((PetscObject) part)->type_name) {
862     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
863   }
864   ierr = PetscOptionsBool("-petscpartitioner_use_vertex_weights","Use vertex weights","",part->usevwgt,&part->usevwgt,NULL);CHKERRQ(ierr);
865   if (part->ops->setfromoptions) {
866     ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);
867   }
868   ierr = PetscViewerDestroy(&part->viewer);CHKERRQ(ierr);
869   ierr = PetscViewerDestroy(&part->viewerGraph);CHKERRQ(ierr);
870   ierr = PetscOptionsGetViewer(((PetscObject) part)->comm, ((PetscObject) part)->options, ((PetscObject) part)->prefix, "-petscpartitioner_view", &part->viewer, NULL, NULL);CHKERRQ(ierr);
871   ierr = PetscOptionsGetViewer(((PetscObject) part)->comm, ((PetscObject) part)->options, ((PetscObject) part)->prefix, "-petscpartitioner_view_graph", &part->viewerGraph, NULL, &part->viewGraph);CHKERRQ(ierr);
872   /* process any options handlers added with PetscObjectAddOptionsHandler() */
873   ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
874   ierr = PetscOptionsEnd();CHKERRQ(ierr);
875   PetscFunctionReturn(0);
876 }
877 
/*@C
  PetscPartitionerSetUp - Construct data structures for the PetscPartitioner

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to setup

  Level: developer

.seealso: PetscPartitionerView(), PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Implementations without a setup hook need no construction; this is a no-op then */
  if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
899 
900 /*@
901   PetscPartitionerDestroy - Destroys a PetscPartitioner object
902 
903   Collective on PetscPartitioner
904 
905   Input Parameter:
906 . part - the PetscPartitioner object to destroy
907 
908   Level: developer
909 
910 .seealso: PetscPartitionerView()
911 @*/
912 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
913 {
914   PetscErrorCode ierr;
915 
916   PetscFunctionBegin;
917   if (!*part) PetscFunctionReturn(0);
918   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
919 
920   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
921   ((PetscObject) (*part))->refct = 0;
922 
923   ierr = PetscViewerDestroy(&(*part)->viewer);CHKERRQ(ierr);
924   ierr = PetscViewerDestroy(&(*part)->viewerGraph);CHKERRQ(ierr);
925   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
926   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
927   PetscFunctionReturn(0);
928 }
929 
930 /*@
931   PetscPartitionerPartition - Partition a graph
932 
933   Collective on PetscPartitioner
934 
935   Input Parameters:
936 + part    - The PetscPartitioner
937 . nparts  - Number of partitions
938 . numVertices - Number of vertices in the local part of the graph
939 . start - row pointers for the local part of the graph (CSR style)
940 . adjacency - adjacency list (CSR style)
941 . vertexSection - PetscSection describing the absolute weight of each local vertex (can be NULL)
942 - targetSection - PetscSection describing the absolute weight of each partition (can be NULL)
943 
944   Output Parameters:
945 + partSection     - The PetscSection giving the division of points by partition
946 - partition       - The list of points by partition
947 
948   Options Database:
949 + -petscpartitioner_view - View the partitioner information
950 - -petscpartitioner_view_graph - View the graph we are partitioning
951 
952   Notes:
953     The chart of the vertexSection (if present) must contain [0,numVertices), with the number of dofs in the section specifying the absolute weight for each vertex.
954     The chart of the targetSection (if present) must contain [0,nparts), with the number of dofs in the section specifying the absolute weight for each partition. This information must be the same across processes, PETSc does not check it.
955 
956   Level: developer
957 
958 .seealso PetscPartitionerCreate(), PetscSectionCreate(), PetscSectionSetChart(), PetscSectionSetDof()
959 @*/
960 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertexSection, PetscSection targetSection, PetscSection partSection, IS *partition)
961 {
962   PetscErrorCode ierr;
963 
964   PetscFunctionBegin;
965   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
966   PetscValidLogicalCollectiveInt(part, nparts, 2);
967   if (nparts <= 0) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Number of parts must be positive");
968   if (numVertices < 0) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of vertices must be non-negative");
969   if (numVertices && !part->noGraph) {
970     PetscValidIntPointer(start, 4);
971     PetscValidIntPointer(start + numVertices, 4);
972     if (start[numVertices]) PetscValidIntPointer(adjacency, 5);
973   }
974   if (vertexSection) {
975     PetscInt s,e;
976 
977     PetscValidHeaderSpecific(vertexSection, PETSC_SECTION_CLASSID, 6);
978     ierr = PetscSectionGetChart(vertexSection, &s, &e);CHKERRQ(ierr);
979     if (s > 0 || e < numVertices) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid vertexSection chart [%D,%D)",s,e);
980   }
981   if (targetSection) {
982     PetscInt s,e;
983 
984     PetscValidHeaderSpecific(targetSection, PETSC_SECTION_CLASSID, 7);
985     ierr = PetscSectionGetChart(targetSection, &s, &e);CHKERRQ(ierr);
986     if (s > 0 || e < nparts) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid targetSection chart [%D,%D)",s,e);
987   }
988   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 8);
989   PetscValidPointer(partition, 9);
990 
991   ierr = PetscSectionReset(partSection);CHKERRQ(ierr);
992   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
993   if (nparts == 1) { /* quick */
994     ierr = PetscSectionSetDof(partSection, 0, numVertices);CHKERRQ(ierr);
995     ierr = ISCreateStride(PetscObjectComm((PetscObject)part),numVertices,0,1,partition);CHKERRQ(ierr);
996   } else {
997     if (!part->ops->partition) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "PetscPartitioner %s has no partitioning method", ((PetscObject)part)->type_name);
998     ierr = (*part->ops->partition)(part, nparts, numVertices, start, adjacency, vertexSection, targetSection, partSection, partition);CHKERRQ(ierr);
999   }
1000   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1001   if (part->viewerGraph) {
1002     PetscViewer viewer = part->viewerGraph;
1003     PetscBool   isascii;
1004     PetscInt    v, i;
1005     PetscMPIInt rank;
1006 
1007     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) viewer), &rank);CHKERRQ(ierr);
1008     ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
1009     if (isascii) {
1010       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
1011       ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]Nv: %D\n", rank, numVertices);CHKERRQ(ierr);
1012       for (v = 0; v < numVertices; ++v) {
1013         const PetscInt s = start[v];
1014         const PetscInt e = start[v+1];
1015 
1016         ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]  ", rank);CHKERRQ(ierr);
1017         for (i = s; i < e; ++i) {ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%D ", adjacency[i]);CHKERRQ(ierr);}
1018         ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%D-%D)\n", s, e);CHKERRQ(ierr);
1019       }
1020       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1021       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
1022     }
1023   }
1024   if (part->viewer) {
1025     ierr = PetscPartitionerView(part,part->viewer);CHKERRQ(ierr);
1026   }
1027   PetscFunctionReturn(0);
1028 }
1029 
1030 /*@
1031   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
1032 
1033   Collective
1034 
1035   Input Parameter:
1036 . comm - The communicator for the PetscPartitioner object
1037 
1038   Output Parameter:
1039 . part - The PetscPartitioner object
1040 
1041   Level: beginner
1042 
1043 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
1044 @*/
1045 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
1046 {
1047   PetscPartitioner p;
1048   const char       *partitionerType = NULL;
1049   PetscErrorCode   ierr;
1050 
1051   PetscFunctionBegin;
1052   PetscValidPointer(part, 2);
1053   *part = NULL;
1054   ierr = DMInitializePackage();CHKERRQ(ierr);
1055 
1056   ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
1057   ierr = PetscPartitionerGetDefaultType(NULL,&partitionerType);CHKERRQ(ierr);
1058   ierr = PetscPartitionerSetType(p,partitionerType);CHKERRQ(ierr);
1059 
1060   p->edgeCut = 0;
1061   p->balance = 0.0;
1062   p->usevwgt = PETSC_TRUE;
1063 
1064   *part = p;
1065   PetscFunctionReturn(0);
1066 }
1067 
1068 /*@
1069   PetscPartitionerDMPlexPartition - Create a non-overlapping partition of the cells in the mesh
1070 
1071   Collective on PetscPartitioner
1072 
1073   Input Parameters:
1074 + part    - The PetscPartitioner
1075 . targetSection - The PetscSection describing the absolute weight of each partition (can be NULL)
1076 - dm      - The mesh DM
1077 
1078   Output Parameters:
1079 + partSection     - The PetscSection giving the division of points by partition
1080 - partition       - The list of points by partition
1081 
1082   Notes:
1083     If the DM has a local section associated, each point to be partitioned will be weighted by the total number of dofs identified
1084     by the section in the transitive closure of the point.
1085 
1086   Level: developer
1087 
1088 .seealso DMPlexDistribute(), PetscPartitionerCreate(), PetscSectionCreate(), PetscSectionSetChart(), PetscPartitionerPartition()
1089 @*/
1090 PetscErrorCode PetscPartitionerDMPlexPartition(PetscPartitioner part, DM dm, PetscSection targetSection, PetscSection partSection, IS *partition)
1091 {
1092   PetscMPIInt    size;
1093   PetscBool      isplex;
1094   PetscErrorCode ierr;
1095   PetscSection   vertSection = NULL;
1096 
1097   PetscFunctionBegin;
1098   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1099   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
1100   if (targetSection) PetscValidHeaderSpecific(targetSection, PETSC_SECTION_CLASSID, 3);
1101   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
1102   PetscValidPointer(partition, 5);
1103   ierr = PetscObjectTypeCompare((PetscObject)dm,DMPLEX,&isplex);CHKERRQ(ierr);
1104   if (!isplex) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"Not for type %s",((PetscObject)dm)->type_name);
1105   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
1106   if (size == 1) {
1107     PetscInt *points;
1108     PetscInt  cStart, cEnd, c;
1109 
1110     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
1111     ierr = PetscSectionReset(partSection);CHKERRQ(ierr);
1112     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
1113     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
1114     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1115     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
1116     for (c = cStart; c < cEnd; ++c) points[c] = c;
1117     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1118     PetscFunctionReturn(0);
1119   }
1120   if (part->height == 0) {
1121     PetscInt numVertices = 0;
1122     PetscInt *start     = NULL;
1123     PetscInt *adjacency = NULL;
1124     IS       globalNumbering;
1125 
1126     if (!part->noGraph || part->viewGraph) {
1127       ierr = DMPlexCreatePartitionerGraph(dm, part->height, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
1128     } else { /* only compute the number of owned local vertices */
1129       const PetscInt *idxs;
1130       PetscInt       p, pStart, pEnd;
1131 
1132       ierr = DMPlexGetHeightStratum(dm, part->height, &pStart, &pEnd);CHKERRQ(ierr);
1133       ierr = DMPlexCreateNumbering_Plex(dm, pStart, pEnd, 0, NULL, dm->sf, &globalNumbering);CHKERRQ(ierr);
1134       ierr = ISGetIndices(globalNumbering, &idxs);CHKERRQ(ierr);
1135       for (p = 0; p < pEnd - pStart; p++) numVertices += idxs[p] < 0 ? 0 : 1;
1136       ierr = ISRestoreIndices(globalNumbering, &idxs);CHKERRQ(ierr);
1137     }
1138     if (part->usevwgt) {
1139       PetscSection   section = dm->localSection, clSection = NULL;
1140       IS             clPoints = NULL;
1141       const PetscInt *gid,*clIdx;
1142       PetscInt       v, p, pStart, pEnd;
1143 
1144       /* dm->localSection encodes degrees of freedom per point, not per cell. We need to get the closure index to properly specify cell weights (aka dofs) */
1145       /* We do this only if the local section has been set */
1146       if (section) {
1147         ierr = PetscSectionGetClosureIndex(section, (PetscObject)dm, &clSection, NULL);CHKERRQ(ierr);
1148         if (!clSection) {
1149           ierr = DMPlexCreateClosureIndex(dm,NULL);CHKERRQ(ierr);
1150         }
1151         ierr = PetscSectionGetClosureIndex(section, (PetscObject)dm, &clSection, &clPoints);CHKERRQ(ierr);
1152         ierr = ISGetIndices(clPoints,&clIdx);CHKERRQ(ierr);
1153       }
1154       ierr = DMPlexGetHeightStratum(dm, part->height, &pStart, &pEnd);CHKERRQ(ierr);
1155       ierr = PetscSectionCreate(PETSC_COMM_SELF, &vertSection);CHKERRQ(ierr);
1156       ierr = PetscSectionSetChart(vertSection, 0, numVertices);CHKERRQ(ierr);
1157       if (globalNumbering) {
1158         ierr = ISGetIndices(globalNumbering,&gid);CHKERRQ(ierr);
1159       } else gid = NULL;
1160       for (p = pStart, v = 0; p < pEnd; ++p) {
1161         PetscInt dof = 1;
1162 
1163         /* skip cells in the overlap */
1164         if (gid && gid[p-pStart] < 0) continue;
1165 
1166         if (section) {
1167           PetscInt cl, clSize, clOff;
1168 
1169           dof  = 0;
1170           ierr = PetscSectionGetDof(clSection, p, &clSize);CHKERRQ(ierr);
1171           ierr = PetscSectionGetOffset(clSection, p, &clOff);CHKERRQ(ierr);
1172           for (cl = 0; cl < clSize; cl+=2) {
1173             PetscInt clDof, clPoint = clIdx[clOff + cl]; /* odd indices are reserved for orientations */
1174 
1175             ierr = PetscSectionGetDof(section, clPoint, &clDof);CHKERRQ(ierr);
1176             dof += clDof;
1177           }
1178         }
1179         if (!dof) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of dofs for point %D in the local section should be positive",p);
1180         ierr = PetscSectionSetDof(vertSection, v, dof);CHKERRQ(ierr);
1181         v++;
1182       }
1183       if (globalNumbering) {
1184         ierr = ISRestoreIndices(globalNumbering,&gid);CHKERRQ(ierr);
1185       }
1186       if (clPoints) {
1187         ierr = ISRestoreIndices(clPoints,&clIdx);CHKERRQ(ierr);
1188       }
1189       ierr = PetscSectionSetUp(vertSection);CHKERRQ(ierr);
1190     }
1191     ierr = PetscPartitionerPartition(part, size, numVertices, start, adjacency, vertSection, targetSection, partSection, partition);CHKERRQ(ierr);
1192     ierr = PetscFree(start);CHKERRQ(ierr);
1193     ierr = PetscFree(adjacency);CHKERRQ(ierr);
1194     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
1195       const PetscInt *globalNum;
1196       const PetscInt *partIdx;
1197       PetscInt       *map, cStart, cEnd;
1198       PetscInt       *adjusted, i, localSize, offset;
1199       IS             newPartition;
1200 
1201       ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
1202       ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
1203       ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
1204       ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
1205       ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
1206       ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
1207       for (i = cStart, offset = 0; i < cEnd; i++) {
1208         if (globalNum[i - cStart] >= 0) map[offset++] = i;
1209       }
1210       for (i = 0; i < localSize; i++) {
1211         adjusted[i] = map[partIdx[i]];
1212       }
1213       ierr = PetscFree(map);CHKERRQ(ierr);
1214       ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
1215       ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
1216       ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
1217       ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
1218       ierr = ISDestroy(partition);CHKERRQ(ierr);
1219       *partition = newPartition;
1220     }
1221   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
1222   ierr = PetscSectionDestroy(&vertSection);CHKERRQ(ierr);
1223   PetscFunctionReturn(0);
1224 }
1225 
/* Free the shell partitioner's stored partition description and its context */
static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1237 
1238 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
1239 {
1240   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1241   PetscErrorCode          ierr;
1242 
1243   PetscFunctionBegin;
1244   if (p->random) {
1245     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1246     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
1247     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1248   }
1249   PetscFunctionReturn(0);
1250 }
1251 
/* Dispatch viewing of the shell partitioner to the format-specific routine */
static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  /* Only ASCII viewers are supported; other formats silently do nothing */
  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
  if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
1264 
/* Process the options specific to the shell partitioner (random partition flag) */
static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner Shell Options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1276 
1277 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
1278 {
1279   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1280   PetscInt                np;
1281   PetscErrorCode          ierr;
1282 
1283   PetscFunctionBegin;
1284   if (p->random) {
1285     PetscRandom r;
1286     PetscInt   *sizes, *points, v, p;
1287     PetscMPIInt rank;
1288 
1289     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) part), &rank);CHKERRQ(ierr);
1290     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
1291     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr);
1292     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
1293     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
1294     for (v = 0; v < numVertices; ++v) {points[v] = v;}
1295     for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);}
1296     for (v = numVertices-1; v > 0; --v) {
1297       PetscReal val;
1298       PetscInt  w, tmp;
1299 
1300       ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));CHKERRQ(ierr);
1301       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
1302       w    = PetscFloorReal(val);
1303       tmp       = points[v];
1304       points[v] = points[w];
1305       points[w] = tmp;
1306     }
1307     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
1308     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
1309     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
1310   }
1311   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
1312   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
1313   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
1314   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
1315   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
1316   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
1317   *partition = p->partition;
1318   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
1319   PetscFunctionReturn(0);
1320 }
1321 
1322 static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
1323 {
1324   PetscFunctionBegin;
1325   part->noGraph             = PETSC_TRUE; /* PetscPartitionerShell cannot overload the partition call, so it is safe for now */
1326   part->ops->view           = PetscPartitionerView_Shell;
1327   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
1328   part->ops->destroy        = PetscPartitionerDestroy_Shell;
1329   part->ops->partition      = PetscPartitionerPartition_Shell;
1330   PetscFunctionReturn(0);
1331 }
1332 
1333 /*MC
1334   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
1335 
1336   Level: intermediate
1337 
1338   Options Database Keys:
1339 .  -petscpartitioner_shell_random - Use a random partition
1340 
1341 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1342 M*/
1343 
/* Constructor for the shell partitioner: allocates the type-specific context
   and installs the Shell function table */
PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
{
  PetscPartitioner_Shell *p;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
  part->data = p;

  ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
  p->random = PETSC_FALSE;
  PetscFunctionReturn(0);
}
1358 
/*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh

  Collective on PetscPartitioner

  Input Parameters:
+ part   - The PetscPartitioner
. size   - The number of partitions
. sizes  - array of length size (or NULL) providing the number of points in each partition
- points - array of length sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)

  Level: developer

  Notes:
    It is safe to free the sizes and points arrays after use in this routine.

.seealso: DMPlexDistribute(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscInt                proc, numPoints;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  if (sizes)  {PetscValidPointer(sizes, 3);}
  if (points) {PetscValidPointer(points, 4);}
  /* Replace any previously stored partition */
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
  if (sizes) {
    for (proc = 0; proc < size; ++proc) {
      ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
    }
  }
  ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
  /* The section's storage size is sum(sizes), i.e. the length of points */
  ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1401 
1402 /*@
1403   PetscPartitionerShellSetRandom - Set the flag to use a random partition
1404 
1405   Collective on PetscPartitioner
1406 
1407   Input Parameters:
1408 + part   - The PetscPartitioner
1409 - random - The flag to use a random partition
1410 
1411   Level: intermediate
1412 
1413 .seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
1414 @*/
1415 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
1416 {
1417   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1418 
1419   PetscFunctionBegin;
1420   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1421   p->random = random;
1422   PetscFunctionReturn(0);
1423 }
1424 
1425 /*@
1426   PetscPartitionerShellGetRandom - get the flag to use a random partition
1427 
  Not collective
1429 
1430   Input Parameter:
1431 . part   - The PetscPartitioner
1432 
1433   Output Parameter:
1434 . random - The flag to use a random partition
1435 
1436   Level: intermediate
1437 
1438 .seealso PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
1439 @*/
1440 PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
1441 {
1442   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1443 
1444   PetscFunctionBegin;
1445   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1446   PetscValidPointer(random, 2);
1447   *random = p->random;
1448   PetscFunctionReturn(0);
1449 }
1450 
1451 static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
1452 {
1453   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
1454   PetscErrorCode          ierr;
1455 
1456   PetscFunctionBegin;
1457   ierr = PetscFree(p);CHKERRQ(ierr);
1458   PetscFunctionReturn(0);
1459 }
1460 
/* The simple partitioner has no state worth displaying; intentionally a no-op */
static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1466 
1467 static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
1468 {
1469   PetscBool      iascii;
1470   PetscErrorCode ierr;
1471 
1472   PetscFunctionBegin;
1473   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1474   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1475   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1476   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
1477   PetscFunctionReturn(0);
1478 }
1479 
/* Partition by contiguous blocks of the global vertex numbering: partition p receives
   vertices [p*(N/nparts) + min(N%nparts,p), ...), i.e. an even split with the remainder
   spread over the first N%nparts partitions, or sizes proportional to targetSection if given. */
static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np, *tpwgts = NULL, sumw = 0, numVerticesGlobal  = 0;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Vertex weights are not used by this partitioner */
  if (vertSection) { ierr = PetscInfo(part,"PETSCPARTITIONERSIMPLE ignores vertex weights\n");CHKERRQ(ierr); }
  comm = PetscObjectComm((PetscObject)part);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (targetSection) {
    /* Convert per-partition target weights into integer vertex counts summing to the global total */
    ierr = MPIU_Allreduce(&numVertices, &numVerticesGlobal, 1, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
    ierr = PetscCalloc1(nparts,&tpwgts);CHKERRQ(ierr);
    for (np = 0; np < nparts; ++np) {
      ierr = PetscSectionGetDof(targetSection,np,&tpwgts[np]);CHKERRQ(ierr);
      sumw += tpwgts[np];
    }
    if (!sumw) {
      /* All targets are zero: drop the weights and fall through to the even split below */
      ierr = PetscFree(tpwgts);CHKERRQ(ierr);
    } else {
      PetscInt m,mp;
      /* Scale targets to vertex counts; integer division may drop a few vertices... */
      for (np = 0; np < nparts; ++np) tpwgts[np] = (tpwgts[np]*numVerticesGlobal)/sumw;
      for (np = 0, m = -1, mp = 0, sumw = 0; np < nparts; ++np) {
        if (m < tpwgts[np]) { m = tpwgts[np]; mp = np; }
        sumw += tpwgts[np];
      }
      /* ...so give the remainder to the largest partition to make the counts sum exactly */
      if (sumw != numVerticesGlobal) tpwgts[mp] += numVerticesGlobal - sumw;
    }
  }

  /* The assignment is by position, so the point permutation is always the identity */
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial case: fill partSection directly from the counts */
    if (tpwgts) {
      for (np = 0; np < nparts; ++np) {
        ierr = PetscSectionSetDof(partSection, np, tpwgts[np]);CHKERRQ(ierr);
      }
    } else {
      for (np = 0; np < nparts; ++np) {
        ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);
      }
    }
  } else {
    if (tpwgts) {
      /* Parallel weighted case: build a global vector whose entry j holds the partition of
         global vertex j, then read back the locally owned range to count local dofs */
      Vec         v;
      PetscScalar *array;
      PetscInt    st,j;
      PetscMPIInt rank;

      ierr = VecCreate(comm,&v);CHKERRQ(ierr);
      ierr = VecSetSizes(v,numVertices,numVerticesGlobal);CHKERRQ(ierr);
      ierr = VecSetType(v,VECSTANDARD);CHKERRQ(ierr);
      ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
      /* Rank np fills partition np's slots; the last rank covers partitions beyond 'size' */
      for (np = 0,st = 0; np < nparts; ++np) {
        if (rank == np || (rank == size-1 && size < nparts && np >= size)) {
          for (j = 0; j < tpwgts[np]; j++) {
            ierr = VecSetValue(v,st+j,np,INSERT_VALUES);CHKERRQ(ierr);
          }
        }
        st += tpwgts[np];
      }
      ierr = VecAssemblyBegin(v);CHKERRQ(ierr);
      ierr = VecAssemblyEnd(v);CHKERRQ(ierr);
      ierr = VecGetArray(v,&array);CHKERRQ(ierr);
      for (j = 0; j < numVertices; ++j) {
        ierr = PetscSectionAddDof(partSection,PetscRealPart(array[j]),1);CHKERRQ(ierr);
      }
      ierr = VecRestoreArray(v,&array);CHKERRQ(ierr);
      ierr = VecDestroy(&v);CHKERRQ(ierr);
    } else {
      /* Parallel even split: compute this rank's global vertex range, then intersect it
         with each partition's global range to get the local dof counts */
      PetscMPIInt rank;
      PetscInt nvGlobal, *offsets, myFirst, myLast;

      ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
      offsets[0] = 0;
      ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
      /* Prefix sum (starting at 2 is fine: offsets[1] += offsets[0] would add zero) */
      for (np = 2; np <= size; np++) {
        offsets[np] += offsets[np-1];
      }
      nvGlobal = offsets[size];
      ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
      myFirst = offsets[rank];
      myLast  = offsets[rank + 1] - 1;
      ierr = PetscFree(offsets);CHKERRQ(ierr);
      if (numVertices) {
        PetscInt firstPart = 0, firstLargePart = 0;
        PetscInt lastPart = 0, lastLargePart = 0;
        PetscInt rem = nvGlobal % nparts;
        PetscInt pSmall = nvGlobal/nparts;
        PetscInt pBig = nvGlobal/nparts + 1;

        /* The first 'rem' partitions get pBig vertices, the rest pSmall; locate the
           partitions containing this rank's first and last global vertices */
        if (rem) {
          firstLargePart = myFirst / pBig;
          lastLargePart  = myLast  / pBig;

          if (firstLargePart < rem) {
            firstPart = firstLargePart;
          } else {
            firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
          }
          if (lastLargePart < rem) {
            lastPart = lastLargePart;
          } else {
            lastPart = rem + (myLast - (rem * pBig)) / pSmall;
          }
        } else {
          firstPart = myFirst / (nvGlobal/nparts);
          lastPart  = myLast  / (nvGlobal/nparts);
        }

        /* Clip each overlapping partition's global range to the local range */
        for (np = firstPart; np <= lastPart; np++) {
          PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
          PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

          PartStart = PetscMax(PartStart,myFirst);
          PartEnd   = PetscMin(PartEnd,myLast+1);
          ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
        }
      }
    }
  }
  ierr = PetscFree(tpwgts);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1604 
/* Install the function table for the simple partitioner */
static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
{
  PetscFunctionBegin;
  /* The simple partitioner never looks at adjacency, so the graph need not be built */
  part->noGraph        = PETSC_TRUE;
  part->ops->view      = PetscPartitionerView_Simple;
  part->ops->destroy   = PetscPartitionerDestroy_Simple;
  part->ops->partition = PetscPartitionerPartition_Simple;
  PetscFunctionReturn(0);
}
1614 
1615 /*MC
1616   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1617 
1618   Level: intermediate
1619 
1620 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1621 M*/
1622 
1623 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1624 {
1625   PetscPartitioner_Simple *p;
1626   PetscErrorCode           ierr;
1627 
1628   PetscFunctionBegin;
1629   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1630   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1631   part->data = p;
1632 
1633   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1634   PetscFunctionReturn(0);
1635 }
1636 
1637 static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1638 {
1639   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1640   PetscErrorCode          ierr;
1641 
1642   PetscFunctionBegin;
1643   ierr = PetscFree(p);CHKERRQ(ierr);
1644   PetscFunctionReturn(0);
1645 }
1646 
/* The gather partitioner has no state worth displaying; intentionally a no-op */
static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1652 
1653 static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1654 {
1655   PetscBool      iascii;
1656   PetscErrorCode ierr;
1657 
1658   PetscFunctionBegin;
1659   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1660   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1661   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1662   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1663   PetscFunctionReturn(0);
1664 }
1665 
1666 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
1667 {
1668   PetscInt       np;
1669   PetscErrorCode ierr;
1670 
1671   PetscFunctionBegin;
1672   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1673   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1674   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1675   PetscFunctionReturn(0);
1676 }
1677 
/* Install the function table for the gather partitioner */
static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
{
  PetscFunctionBegin;
  /* The gather partitioner never looks at adjacency, so the graph need not be built */
  part->noGraph        = PETSC_TRUE;
  part->ops->view      = PetscPartitionerView_Gather;
  part->ops->destroy   = PetscPartitionerDestroy_Gather;
  part->ops->partition = PetscPartitionerPartition_Gather;
  PetscFunctionReturn(0);
}
1687 
1688 /*MC
1689   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1690 
1691   Level: intermediate
1692 
1693 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1694 M*/
1695 
1696 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1697 {
1698   PetscPartitioner_Gather *p;
1699   PetscErrorCode           ierr;
1700 
1701   PetscFunctionBegin;
1702   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1703   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1704   part->data = p;
1705 
1706   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1707   PetscFunctionReturn(0);
1708 }
1709 
1710 
1711 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1712 {
1713   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1714   PetscErrorCode          ierr;
1715 
1716   PetscFunctionBegin;
1717   ierr = PetscFree(p);CHKERRQ(ierr);
1718   PetscFunctionReturn(0);
1719 }
1720 
/* The Chaco partitioner has no state worth displaying; intentionally a no-op */
static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1726 
1727 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1728 {
1729   PetscBool      iascii;
1730   PetscErrorCode ierr;
1731 
1732   PetscFunctionBegin;
1733   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1734   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1735   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1736   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1737   PetscFunctionReturn(0);
1738 }
1739 
1740 #if defined(PETSC_HAVE_CHACO)
1741 #if defined(PETSC_HAVE_UNISTD_H)
1742 #include <unistd.h>
1743 #endif
1744 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1745 #include <chaco.h>
1746 #else
1747 /* Older versions of Chaco do not have an include file */
1748 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1749                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1750                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1751                        int mesh_dims[3], double *goal, int global_method, int local_method,
1752                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1753 #endif
1754 extern int FREE_GRAPH;
1755 #endif
1756 
/* Partition a serial graph with the Chaco library. The graph must reside on a single
   rank; other ranks receive an empty partition IS. */
static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int           *assignment;              /* Output partition */
#else
  short int     *assignment;              /* Output partition */
#endif
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)part,&comm);CHKERRQ(ierr);
#if defined (PETSC_USE_DEBUG)
  {
    int ival,isum;
    PetscBool distributed;

    /* Chaco is serial: error out (in debug builds) if more than one rank holds vertices */
    ival = (numVertices > 0);
    ierr = MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);CHKERRQ(ierr);
    distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
    if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
  }
#endif
  if (!numVertices) { /* distributed case, return if not holding the graph */
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco expects 1-based vertex numbers: shift the adjacency in place and undo it at the end.
     NOTE(review): the SETERRQ error paths below return with adjacency still shifted and
     'assignment' unfreed - presumably acceptable because these errors are fatal; confirm. */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  if (part->usevwgt) { ierr = PetscInfo(part,"PETSCPARTITIONERCHACO ignores vertex weights\n");CHKERRQ(ierr); }
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    /* Restore stdout and capture whatever Chaco printed so it can be reported on error */
    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#else
  if (ierr) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
#endif
  /* Convert to PetscSection+IS */
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  /* Group vertex numbers by partition: partition 0's vertices first, then partition 1's, ... */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift applied above */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1874 
/* Install the function table for the Chaco partitioner */
static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
{
  PetscFunctionBegin;
  /* Chaco partitions the adjacency graph, so the graph must be built */
  part->noGraph        = PETSC_FALSE;
  part->ops->view      = PetscPartitionerView_Chaco;
  part->ops->destroy   = PetscPartitionerDestroy_Chaco;
  part->ops->partition = PetscPartitionerPartition_Chaco;
  PetscFunctionReturn(0);
}
1884 
1885 /*MC
1886   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1887 
1888   Level: intermediate
1889 
1890 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1891 M*/
1892 
1893 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1894 {
1895   PetscPartitioner_Chaco *p;
1896   PetscErrorCode          ierr;
1897 
1898   PetscFunctionBegin;
1899   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1900   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1901   part->data = p;
1902 
1903   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1904   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1905   PetscFunctionReturn(0);
1906 }
1907 
1908 static const char *ptypes[] = {"kway", "rb"};
1909 
1910 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1911 {
1912   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1913   PetscErrorCode             ierr;
1914 
1915   PetscFunctionBegin;
1916   ierr = MPI_Comm_free(&p->pcomm);CHKERRQ(ierr);
1917   ierr = PetscFree(p);CHKERRQ(ierr);
1918   PetscFunctionReturn(0);
1919 }
1920 
1921 static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1922 {
1923   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1924   PetscErrorCode             ierr;
1925 
1926   PetscFunctionBegin;
1927   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1928   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis type: %s\n", ptypes[p->ptype]);CHKERRQ(ierr);
1929   ierr = PetscViewerASCIIPrintf(viewer, "load imbalance ratio %g\n", (double) p->imbalanceRatio);CHKERRQ(ierr);
1930   ierr = PetscViewerASCIIPrintf(viewer, "debug flag %D\n", p->debugFlag);CHKERRQ(ierr);
1931   ierr = PetscViewerASCIIPrintf(viewer, "random seed %D\n", p->randomSeed);CHKERRQ(ierr);
1932   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1933   PetscFunctionReturn(0);
1934 }
1935 
1936 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1937 {
1938   PetscBool      iascii;
1939   PetscErrorCode ierr;
1940 
1941   PetscFunctionBegin;
1942   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1943   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1944   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1945   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1946   PetscFunctionReturn(0);
1947 }
1948 
/* Process command-line options for the ParMetis partitioner */
static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
  PetscErrorCode            ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");CHKERRQ(ierr);
  /* p->ptype indexes the static ptypes[] table: 0 = "kway", 1 = "rb" */
  ierr = PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-petscpartitioner_parmetis_imbalance_ratio", "Load imbalance ratio limit", "", p->imbalanceRatio, &p->imbalanceRatio, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-petscpartitioner_parmetis_debug", "Debugging flag", "", p->debugFlag, &p->debugFlag, NULL);CHKERRQ(ierr);
  /* -1 keeps the library's default seed (see PetscPartitionerPartition_ParMetis) */
  ierr = PetscOptionsInt("-petscpartitioner_parmetis_seed", "Random seed", "", p->randomSeed, &p->randomSeed, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1963 
1964 #if defined(PETSC_HAVE_PARMETIS)
1965 #include <parmetis.h>
1966 #endif
1967 
1968 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
1969 {
1970 #if defined(PETSC_HAVE_PARMETIS)
1971   PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *) part->data;
1972   MPI_Comm       comm;
1973   PetscInt       nvtxs       = numVertices; /* The number of vertices in full graph */
1974   PetscInt      *vtxdist;                   /* Distribution of vertices across processes */
1975   PetscInt      *xadj        = start;       /* Start of edge list for each vertex */
1976   PetscInt      *adjncy      = adjacency;   /* Edge lists for all vertices */
1977   PetscInt      *vwgt        = NULL;        /* Vertex weights */
1978   PetscInt      *adjwgt      = NULL;        /* Edge weights */
1979   PetscInt       wgtflag     = 0;           /* Indicates which weights are present */
1980   PetscInt       numflag     = 0;           /* Indicates initial offset (0 or 1) */
1981   PetscInt       ncon        = 1;           /* The number of weights per vertex */
1982   PetscInt       metis_ptype = pm->ptype;   /* kway or recursive bisection */
1983   real_t        *tpwgts;                    /* The fraction of vertex weights assigned to each partition */
1984   real_t        *ubvec;                     /* The balance intolerance for vertex weights */
1985   PetscInt       options[64];               /* Options */
1986   PetscInt       v, i, *assignment, *points;
1987   PetscMPIInt    p, size, rank;
1988   PetscBool      hasempty = PETSC_FALSE;
1989   PetscErrorCode ierr;
1990 
1991   PetscFunctionBegin;
1992   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1993   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1994   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1995   /* Calculate vertex distribution */
1996   ierr = PetscMalloc4(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment);CHKERRQ(ierr);
1997   vtxdist[0] = 0;
1998   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1999   for (p = 2; p <= size; ++p) {
2000     hasempty = (PetscBool)(hasempty || !vtxdist[p-1] || !vtxdist[p]);
2001     vtxdist[p] += vtxdist[p-1];
2002   }
2003   /* null graph */
2004   if (vtxdist[size] == 0) {
2005     ierr = PetscFree4(vtxdist,tpwgts,ubvec,assignment);CHKERRQ(ierr);
2006     ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
2007     PetscFunctionReturn(0);
2008   }
2009   /* Calculate partition weights */
2010   if (targetSection) {
2011     PetscInt p;
2012     real_t   sumt = 0.0;
2013 
2014     for (p = 0; p < nparts; ++p) {
2015       PetscInt tpd;
2016 
2017       ierr = PetscSectionGetDof(targetSection,p,&tpd);CHKERRQ(ierr);
2018       sumt += tpd;
2019       tpwgts[p] = tpd;
2020     }
2021     if (sumt) { /* METIS/ParMETIS do not like exactly zero weight */
2022       for (p = 0, sumt = 0.0; p < nparts; ++p) {
2023         tpwgts[p] = PetscMax(tpwgts[p],PETSC_SMALL);
2024         sumt += tpwgts[p];
2025       }
2026       for (p = 0; p < nparts; ++p) tpwgts[p] /= sumt;
2027       for (p = 0, sumt = 0.0; p < nparts-1; ++p) sumt += tpwgts[p];
2028       tpwgts[nparts - 1] = 1. - sumt;
2029     }
2030   } else {
2031     for (p = 0; p < nparts; ++p) tpwgts[p] = 1.0/nparts;
2032   }
2033   ubvec[0] = pm->imbalanceRatio;
2034 
2035   /* Weight cells */
2036   if (vertSection) {
2037     ierr = PetscMalloc1(nvtxs,&vwgt);CHKERRQ(ierr);
2038     for (v = 0; v < nvtxs; ++v) {
2039       ierr = PetscSectionGetDof(vertSection, v, &vwgt[v]);CHKERRQ(ierr);
2040     }
2041     wgtflag |= 2; /* have weights on graph vertices */
2042   }
2043 
2044   for (p = 0; !vtxdist[p+1] && p < size; ++p);
2045   if (vtxdist[p+1] == vtxdist[size]) {
2046     if (rank == p) {
2047       ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
2048       options[METIS_OPTION_DBGLVL] = pm->debugFlag;
2049       options[METIS_OPTION_SEED]   = pm->randomSeed;
2050       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
2051       if (metis_ptype == 1) {
2052         PetscStackPush("METIS_PartGraphRecursive");
2053         ierr = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
2054         PetscStackPop;
2055         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
2056       } else {
2057         /*
2058          It would be nice to activate the two options below, but they would need some actual testing.
2059          - Turning on these options may exercise path of the METIS code that have bugs and may break production runs.
2060          - If CONTIG is set to 1, METIS will exit with error if the graph is disconnected, despite the manual saying the option is ignored in such case.
2061         */
2062         /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
2063         /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
2064         PetscStackPush("METIS_PartGraphKway");
2065         ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
2066         PetscStackPop;
2067         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
2068       }
2069     }
2070   } else {
2071     MPI_Comm pcomm;
2072 
2073     options[0] = 1; /*use options */
2074     options[1] = pm->debugFlag;
2075     options[2] = (pm->randomSeed == -1) ? 15 : pm->randomSeed; /* default is GLOBAL_SEED=15 from `libparmetis/defs.h` */
2076 
2077     if (hasempty) { /* parmetis does not support empty graphs on some of the processes */
2078       PetscInt cnt;
2079 
2080       ierr = MPI_Comm_split(pm->pcomm,!!nvtxs,rank,&pcomm);CHKERRQ(ierr);
2081       for (p=0,cnt=0;p<size;p++) {
2082         if (vtxdist[p+1] != vtxdist[p]) {
2083           vtxdist[cnt+1] = vtxdist[p+1];
2084           cnt++;
2085         }
2086       }
2087     } else pcomm = pm->pcomm;
2088     if (nvtxs) {
2089       PetscStackPush("ParMETIS_V3_PartKway");
2090       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &pcomm);
2091       PetscStackPop;
2092       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
2093     }
2094     if (hasempty) {
2095       ierr = MPI_Comm_free(&pcomm);CHKERRQ(ierr);
2096     }
2097   }
2098 
2099   /* Convert to PetscSection+IS */
2100   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
2101   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
2102   for (p = 0, i = 0; p < nparts; ++p) {
2103     for (v = 0; v < nvtxs; ++v) {
2104       if (assignment[v] == p) points[i++] = v;
2105     }
2106   }
2107   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
2108   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
2109   ierr = PetscFree4(vtxdist,tpwgts,ubvec,assignment);CHKERRQ(ierr);
2110   ierr = PetscFree(vwgt);CHKERRQ(ierr);
2111   PetscFunctionReturn(0);
2112 #else
2113   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
2114 #endif
2115 }
2116 
/* Install the function table for the ParMetis partitioner */
static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
{
  PetscFunctionBegin;
  /* ParMETIS partitions the adjacency graph, so the graph must be built */
  part->noGraph             = PETSC_FALSE;
  part->ops->view           = PetscPartitionerView_ParMetis;
  part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
  part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
  part->ops->partition      = PetscPartitionerPartition_ParMetis;
  PetscFunctionReturn(0);
}
2127 
2128 /*MC
2129   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMETIS library
2130 
2131   Level: intermediate
2132 
2133   Options Database Keys:
2134 +  -petscpartitioner_parmetis_type <string> - ParMETIS partitioning type. Either "kway" or "rb" (recursive bisection)
2135 .  -petscpartitioner_parmetis_imbalance_ratio <value> - Load imbalance ratio limit
2136 .  -petscpartitioner_parmetis_debug <int> - Debugging flag passed to ParMETIS/METIS routines
2137 -  -petscpartitioner_parmetis_seed <int> - Random seed
2138 
2139   Notes: when the graph is on a single process, this partitioner actually calls METIS and not ParMETIS
2140 
2141 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
2142 M*/
2143 
2144 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
2145 {
2146   PetscPartitioner_ParMetis *p;
2147   PetscErrorCode          ierr;
2148 
2149   PetscFunctionBegin;
2150   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
2151   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
2152   part->data = p;
2153 
2154   ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)part),&p->pcomm);CHKERRQ(ierr);
2155   p->ptype          = 0;
2156   p->imbalanceRatio = 1.05;
2157   p->debugFlag      = 0;
2158   p->randomSeed     = -1; /* defaults to GLOBAL_SEED=15 from `libparmetis/defs.h` */
2159 
2160   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
2161   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
2162   PetscFunctionReturn(0);
2163 }
2164 
2165 #if defined(PETSC_HAVE_PTSCOTCH)
2166 
2167 EXTERN_C_BEGIN
2168 #include <ptscotch.h>
2169 EXTERN_C_END
2170 
2171 #define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0)
2172 
2173 static int PTScotch_Strategy(PetscInt strategy)
2174 {
2175   switch (strategy) {
2176   case  0: return SCOTCH_STRATDEFAULT;
2177   case  1: return SCOTCH_STRATQUALITY;
2178   case  2: return SCOTCH_STRATSPEED;
2179   case  3: return SCOTCH_STRATBALANCE;
2180   case  4: return SCOTCH_STRATSAFETY;
2181   case  5: return SCOTCH_STRATSCALABILITY;
2182   case  6: return SCOTCH_STRATRECURSIVE;
2183   case  7: return SCOTCH_STRATREMAP;
2184   default: return SCOTCH_STRATDEFAULT;
2185   }
2186 }
2187 
/* PTScotch_PartGraph_Seq - Partition a graph held on a single process with (sequential) Scotch.

   The graph is given in CSR form: n vertices, xadj[] row offsets, adjncy[] neighbor lists.
   vtxwgt[]/adjwgt[] are optional vertex/edge weight arrays, nparts is the requested number
   of parts, and tpart[], if non-NULL, holds target partition weights. The part assigned to
   each vertex is written into part[]. */
static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
                                             SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num tpart[], SCOTCH_Num part[])
{
  SCOTCH_Graph   grafdat;
  SCOTCH_Strat   stradat;
  SCOTCH_Num     vertnbr = n;       /* number of vertices */
  SCOTCH_Num     edgenbr = xadj[n]; /* number of stored arcs (CSR tail) */
  SCOTCH_Num*    velotab = vtxwgt;  /* vertex load array; may be disabled below */
  SCOTCH_Num*    edlotab = adjwgt;  /* edge load array */
  SCOTCH_Num     flagval = strategy;
  double         kbalval = imbalance;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  {
    /* Deprecated way to disable vertex weights (option string keeps its historical spelling) */
    PetscBool flg = PETSC_TRUE;
    ierr = PetscOptionsDeprecatedNoObject("-petscpartititoner_ptscotch_vertex_weight",NULL,"3.13","Use -petscpartitioner_use_vertex_weights");CHKERRQ(ierr);
    ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
    if (!flg) velotab = NULL;
  }
  ierr = SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
  if (tpart) {
    /* Weighted targets: map the graph onto a weighted complete-graph architecture */
    SCOTCH_Arch archdat;
    ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
    ierr = SCOTCH_archCmpltw(&archdat, nparts, tpart);CHKERRPTSCOTCH(ierr);
    ierr = SCOTCH_graphMap(&grafdat, &archdat, &stradat, part);CHKERRPTSCOTCH(ierr);
    SCOTCH_archExit(&archdat);
  } else {
    /* Unweighted targets: plain k-way partition */
    ierr = SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
  }
  SCOTCH_stratExit(&stradat);
  SCOTCH_graphExit(&grafdat);
  PetscFunctionReturn(0);
}
2225 
2226 static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
2227                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num tpart[], SCOTCH_Num part[], MPI_Comm comm)
2228 {
2229   PetscMPIInt     procglbnbr;
2230   PetscMPIInt     proclocnum;
2231   SCOTCH_Arch     archdat;
2232   SCOTCH_Dgraph   grafdat;
2233   SCOTCH_Dmapping mappdat;
2234   SCOTCH_Strat    stradat;
2235   SCOTCH_Num      vertlocnbr;
2236   SCOTCH_Num      edgelocnbr;
2237   SCOTCH_Num*     veloloctab = vtxwgt;
2238   SCOTCH_Num*     edloloctab = adjwgt;
2239   SCOTCH_Num      flagval = strategy;
2240   double          kbalval = imbalance;
2241   PetscErrorCode  ierr;
2242 
2243   PetscFunctionBegin;
2244   {
2245     PetscBool flg = PETSC_TRUE;
2246     ierr = PetscOptionsDeprecatedNoObject("-petscpartititoner_ptscotch_vertex_weight",NULL,"3.13","Use -petscpartitioner_use_vertex_weights");CHKERRQ(ierr);
2247     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
2248     if (!flg) veloloctab = NULL;
2249   }
2250   ierr = MPI_Comm_size(comm, &procglbnbr);CHKERRQ(ierr);
2251   ierr = MPI_Comm_rank(comm, &proclocnum);CHKERRQ(ierr);
2252   vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
2253   edgelocnbr = xadj[vertlocnbr];
2254 
2255   ierr = SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr);
2256   ierr = SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr);
2257   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
2258   ierr = SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRQ(ierr);
2259   ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
2260   if (tpart) { /* target partition weights */
2261     ierr = SCOTCH_archCmpltw(&archdat, nparts, tpart);CHKERRPTSCOTCH(ierr);
2262   } else {
2263     ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
2264   }
2265   ierr = SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr);
2266 
2267   ierr = SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr);
2268   SCOTCH_dgraphMapExit(&grafdat, &mappdat);
2269   SCOTCH_archExit(&archdat);
2270   SCOTCH_stratExit(&stradat);
2271   SCOTCH_dgraphExit(&grafdat);
2272   PetscFunctionReturn(0);
2273 }
2274 
2275 #endif /* PETSC_HAVE_PTSCOTCH */
2276 
2277 static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
2278 {
2279   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
2280   PetscErrorCode             ierr;
2281 
2282   PetscFunctionBegin;
2283   ierr = MPI_Comm_free(&p->pcomm);CHKERRQ(ierr);
2284   ierr = PetscFree(p);CHKERRQ(ierr);
2285   PetscFunctionReturn(0);
2286 }
2287 
2288 static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer)
2289 {
2290   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
2291   PetscErrorCode            ierr;
2292 
2293   PetscFunctionBegin;
2294   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
2295   ierr = PetscViewerASCIIPrintf(viewer,"using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);CHKERRQ(ierr);
2296   ierr = PetscViewerASCIIPrintf(viewer,"using load imbalance ratio %g\n",(double)p->imbalance);CHKERRQ(ierr);
2297   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
2298   PetscFunctionReturn(0);
2299 }
2300 
2301 static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
2302 {
2303   PetscBool      iascii;
2304   PetscErrorCode ierr;
2305 
2306   PetscFunctionBegin;
2307   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
2308   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
2309   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
2310   if (iascii) {ierr = PetscPartitionerView_PTScotch_Ascii(part, viewer);CHKERRQ(ierr);}
2311   PetscFunctionReturn(0);
2312 }
2313 
2314 static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
2315 {
2316   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
2317   const char *const         *slist = PTScotchStrategyList;
2318   PetscInt                  nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0]));
2319   PetscBool                 flag;
2320   PetscErrorCode            ierr;
2321 
2322   PetscFunctionBegin;
2323   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");CHKERRQ(ierr);
2324   ierr = PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);CHKERRQ(ierr);
2325   ierr = PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);CHKERRQ(ierr);
2326   ierr = PetscOptionsTail();CHKERRQ(ierr);
2327   PetscFunctionReturn(0);
2328 }
2329 
2330 static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
2331 {
2332 #if defined(PETSC_HAVE_PTSCOTCH)
2333   MPI_Comm       comm;
2334   PetscInt       nvtxs = numVertices;   /* The number of vertices in full graph */
2335   PetscInt       *vtxdist;              /* Distribution of vertices across processes */
2336   PetscInt       *xadj   = start;       /* Start of edge list for each vertex */
2337   PetscInt       *adjncy = adjacency;   /* Edge lists for all vertices */
2338   PetscInt       *vwgt   = NULL;        /* Vertex weights */
2339   PetscInt       *adjwgt = NULL;        /* Edge weights */
2340   PetscInt       v, i, *assignment, *points;
2341   PetscMPIInt    size, rank, p;
2342   PetscBool      hasempty = PETSC_FALSE;
2343   PetscInt       *tpwgts = NULL;
2344   PetscErrorCode ierr;
2345 
2346   PetscFunctionBegin;
2347   ierr = PetscObjectGetComm((PetscObject)part,&comm);CHKERRQ(ierr);
2348   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2349   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2350   ierr = PetscMalloc2(size+1,&vtxdist,PetscMax(nvtxs,1),&assignment);CHKERRQ(ierr);
2351   /* Calculate vertex distribution */
2352   vtxdist[0] = 0;
2353   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
2354   for (p = 2; p <= size; ++p) {
2355     hasempty = (PetscBool)(hasempty || !vtxdist[p-1] || !vtxdist[p]);
2356     vtxdist[p] += vtxdist[p-1];
2357   }
2358   /* null graph */
2359   if (vtxdist[size] == 0) {
2360     ierr = PetscFree2(vtxdist, assignment);CHKERRQ(ierr);
2361     ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
2362     PetscFunctionReturn(0);
2363   }
2364 
2365   /* Calculate vertex weights */
2366   if (vertSection) {
2367     ierr = PetscMalloc1(nvtxs,&vwgt);CHKERRQ(ierr);
2368     for (v = 0; v < nvtxs; ++v) {
2369       ierr = PetscSectionGetDof(vertSection, v, &vwgt[v]);CHKERRQ(ierr);
2370     }
2371   }
2372 
2373   /* Calculate partition weights */
2374   if (targetSection) {
2375     PetscInt sumw;
2376 
2377     ierr = PetscCalloc1(nparts,&tpwgts);CHKERRQ(ierr);
2378     for (p = 0, sumw = 0; p < nparts; ++p) {
2379       ierr = PetscSectionGetDof(targetSection,p,&tpwgts[p]);CHKERRQ(ierr);
2380       sumw += tpwgts[p];
2381     }
2382     if (!sumw) {
2383       ierr = PetscFree(tpwgts);CHKERRQ(ierr);
2384     }
2385   }
2386 
2387   {
2388     PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
2389     int                       strat = PTScotch_Strategy(pts->strategy);
2390     double                    imbal = (double)pts->imbalance;
2391 
2392     for (p = 0; !vtxdist[p+1] && p < size; ++p);
2393     if (vtxdist[p+1] == vtxdist[size]) {
2394       if (rank == p) {
2395         ierr = PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, assignment);CHKERRQ(ierr);
2396       }
2397     } else {
2398       PetscInt cnt;
2399       MPI_Comm pcomm;
2400 
2401       if (hasempty) {
2402         ierr = MPI_Comm_split(pts->pcomm,!!nvtxs,rank,&pcomm);CHKERRQ(ierr);
2403         for (p=0,cnt=0;p<size;p++) {
2404           if (vtxdist[p+1] != vtxdist[p]) {
2405             vtxdist[cnt+1] = vtxdist[p+1];
2406             cnt++;
2407           }
2408         }
2409       } else pcomm = pts->pcomm;
2410       if (nvtxs) {
2411         ierr = PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, assignment, pcomm);CHKERRQ(ierr);
2412       }
2413       if (hasempty) {
2414         ierr = MPI_Comm_free(&pcomm);CHKERRQ(ierr);
2415       }
2416     }
2417   }
2418   ierr = PetscFree(vwgt);CHKERRQ(ierr);
2419   ierr = PetscFree(tpwgts);CHKERRQ(ierr);
2420 
2421   /* Convert to PetscSection+IS */
2422   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
2423   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
2424   for (p = 0, i = 0; p < nparts; ++p) {
2425     for (v = 0; v < nvtxs; ++v) {
2426       if (assignment[v] == p) points[i++] = v;
2427     }
2428   }
2429   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
2430   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
2431 
2432   ierr = PetscFree2(vtxdist,assignment);CHKERRQ(ierr);
2433   PetscFunctionReturn(0);
2434 #else
2435   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
2436 #endif
2437 }
2438 
2439 static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
2440 {
2441   PetscFunctionBegin;
2442   part->noGraph             = PETSC_FALSE;
2443   part->ops->view           = PetscPartitionerView_PTScotch;
2444   part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
2445   part->ops->partition      = PetscPartitionerPartition_PTScotch;
2446   part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
2447   PetscFunctionReturn(0);
2448 }
2449 
2450 /*MC
2451   PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library
2452 
2453   Level: intermediate
2454 
2455   Options Database Keys:
2456 +  -petscpartitioner_ptscotch_strategy <string> - PT-Scotch strategy. Choose one of default quality speed balance safety scalability recursive remap
2457 -  -petscpartitioner_ptscotch_imbalance <val> - Load imbalance ratio
2458 
2459   Notes: when the graph is on a single process, this partitioner actually uses Scotch and not PT-Scotch
2460 
2461 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
2462 M*/
2463 
2464 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
2465 {
2466   PetscPartitioner_PTScotch *p;
2467   PetscErrorCode          ierr;
2468 
2469   PetscFunctionBegin;
2470   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
2471   ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
2472   part->data = p;
2473 
2474   ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)part),&p->pcomm);CHKERRQ(ierr);
2475   p->strategy  = 0;
2476   p->imbalance = 0.01;
2477 
2478   ierr = PetscPartitionerInitialize_PTScotch(part);CHKERRQ(ierr);
2479   ierr = PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);CHKERRQ(ierr);
2480   PetscFunctionReturn(0);
2481 }
2482 
2483 /*@
2484   DMPlexGetPartitioner - Get the mesh partitioner
2485 
2486   Not collective
2487 
2488   Input Parameter:
2489 . dm - The DM
2490 
2491   Output Parameter:
2492 . part - The PetscPartitioner
2493 
2494   Level: developer
2495 
2496   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
2497 
2498 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerDMPlexPartition(), PetscPartitionerCreate()
2499 @*/
2500 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
2501 {
2502   DM_Plex       *mesh = (DM_Plex *) dm->data;
2503 
2504   PetscFunctionBegin;
2505   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2506   PetscValidPointer(part, 2);
2507   *part = mesh->partitioner;
2508   PetscFunctionReturn(0);
2509 }
2510 
2511 /*@
2512   DMPlexSetPartitioner - Set the mesh partitioner
2513 
2514   logically collective on DM
2515 
2516   Input Parameters:
2517 + dm - The DM
2518 - part - The partitioner
2519 
2520   Level: developer
2521 
2522   Note: Any existing PetscPartitioner will be destroyed.
2523 
2524 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
2525 @*/
2526 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
2527 {
2528   DM_Plex       *mesh = (DM_Plex *) dm->data;
2529   PetscErrorCode ierr;
2530 
2531   PetscFunctionBegin;
2532   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2533   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
2534   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
2535   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
2536   mesh->partitioner = part;
2537   PetscFunctionReturn(0);
2538 }
2539 
2540 static PetscErrorCode DMPlexAddClosure_Private(DM dm, PetscHSetI ht, PetscInt point)
2541 {
2542   const PetscInt *cone;
2543   PetscInt       coneSize, c;
2544   PetscBool      missing;
2545   PetscErrorCode ierr;
2546 
2547   PetscFunctionBeginHot;
2548   ierr = PetscHSetIQueryAdd(ht, point, &missing);CHKERRQ(ierr);
2549   if (missing) {
2550     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
2551     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
2552     for (c = 0; c < coneSize; c++) {
2553       ierr = DMPlexAddClosure_Private(dm, ht, cone[c]);CHKERRQ(ierr);
2554     }
2555   }
2556   PetscFunctionReturn(0);
2557 }
2558 
2559 PETSC_UNUSED static PetscErrorCode DMPlexAddClosure_Tree(DM dm, PetscHSetI ht, PetscInt point, PetscBool up, PetscBool down)
2560 {
2561   PetscErrorCode ierr;
2562 
2563   PetscFunctionBegin;
2564   if (up) {
2565     PetscInt parent;
2566 
2567     ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
2568     if (parent != point) {
2569       PetscInt closureSize, *closure = NULL, i;
2570 
2571       ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
2572       for (i = 0; i < closureSize; i++) {
2573         PetscInt cpoint = closure[2*i];
2574 
2575         ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
2576         ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
2577       }
2578       ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
2579     }
2580   }
2581   if (down) {
2582     PetscInt numChildren;
2583     const PetscInt *children;
2584 
2585     ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
2586     if (numChildren) {
2587       PetscInt i;
2588 
2589       for (i = 0; i < numChildren; i++) {
2590         PetscInt cpoint = children[i];
2591 
2592         ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
2593         ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
2594       }
2595     }
2596   }
2597   PetscFunctionReturn(0);
2598 }
2599 
2600 static PetscErrorCode DMPlexAddClosureTree_Up_Private(DM dm, PetscHSetI ht, PetscInt point)
2601 {
2602   PetscInt       parent;
2603   PetscErrorCode ierr;
2604 
2605   PetscFunctionBeginHot;
2606   ierr = DMPlexGetTreeParent(dm, point, &parent,NULL);CHKERRQ(ierr);
2607   if (point != parent) {
2608     const PetscInt *cone;
2609     PetscInt       coneSize, c;
2610 
2611     ierr = DMPlexAddClosureTree_Up_Private(dm, ht, parent);CHKERRQ(ierr);
2612     ierr = DMPlexAddClosure_Private(dm, ht, parent);CHKERRQ(ierr);
2613     ierr = DMPlexGetCone(dm, parent, &cone);CHKERRQ(ierr);
2614     ierr = DMPlexGetConeSize(dm, parent, &coneSize);CHKERRQ(ierr);
2615     for (c = 0; c < coneSize; c++) {
2616       const PetscInt cp = cone[c];
2617 
2618       ierr = DMPlexAddClosureTree_Up_Private(dm, ht, cp);CHKERRQ(ierr);
2619     }
2620   }
2621   PetscFunctionReturn(0);
2622 }
2623 
2624 static PetscErrorCode DMPlexAddClosureTree_Down_Private(DM dm, PetscHSetI ht, PetscInt point)
2625 {
2626   PetscInt       i, numChildren;
2627   const PetscInt *children;
2628   PetscErrorCode ierr;
2629 
2630   PetscFunctionBeginHot;
2631   ierr = DMPlexGetTreeChildren(dm, point, &numChildren, &children);CHKERRQ(ierr);
2632   for (i = 0; i < numChildren; i++) {
2633     ierr = PetscHSetIAdd(ht, children[i]);CHKERRQ(ierr);
2634   }
2635   PetscFunctionReturn(0);
2636 }
2637 
2638 static PetscErrorCode DMPlexAddClosureTree_Private(DM dm, PetscHSetI ht, PetscInt point)
2639 {
2640   const PetscInt *cone;
2641   PetscInt       coneSize, c;
2642   PetscErrorCode ierr;
2643 
2644   PetscFunctionBeginHot;
2645   ierr = PetscHSetIAdd(ht, point);CHKERRQ(ierr);
2646   ierr = DMPlexAddClosureTree_Up_Private(dm, ht, point);CHKERRQ(ierr);
2647   ierr = DMPlexAddClosureTree_Down_Private(dm, ht, point);CHKERRQ(ierr);
2648   ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
2649   ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
2650   for (c = 0; c < coneSize; c++) {
2651     ierr = DMPlexAddClosureTree_Private(dm, ht, cone[c]);CHKERRQ(ierr);
2652   }
2653   PetscFunctionReturn(0);
2654 }
2655 
/* DMPlexClosurePoints_Private - Compute the unique points in the transitive closure
   of the given points and return them as a sorted IS (which owns its array).

   A hash set is used for deduplication. When the mesh carries a reference tree
   (parentSection/childSection), tree ancestors and children are included via the
   tree-aware closure routine. */
PetscErrorCode DMPlexClosurePoints_Private(DM dm, PetscInt numPoints, const PetscInt points[], IS *closureIS)
{
  DM_Plex         *mesh = (DM_Plex *)dm->data;
  const PetscBool hasTree = (mesh->parentSection || mesh->childSection) ? PETSC_TRUE : PETSC_FALSE;
  PetscInt        nelems, *elems, off = 0, p;
  PetscHSetI      ht;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
  /* Preallocate assuming roughly 16 closure points per input point */
  ierr = PetscHSetIResize(ht, numPoints*16);CHKERRQ(ierr);
  if (!hasTree) {
    for (p = 0; p < numPoints; ++p) {
      ierr = DMPlexAddClosure_Private(dm, ht, points[p]);CHKERRQ(ierr);
    }
  } else {
#if 1
    for (p = 0; p < numPoints; ++p) {
      ierr = DMPlexAddClosureTree_Private(dm, ht, points[p]);CHKERRQ(ierr);
    }
#else
    /* Alternative (currently disabled) implementation using DMPlexGetTransitiveClosure() */
    PetscInt  *closure = NULL, closureSize, c;
    for (p = 0; p < numPoints; ++p) {
      ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      for (c = 0; c < closureSize*2; c += 2) {
        ierr = PetscHSetIAdd(ht, closure[c]);CHKERRQ(ierr);
        if (hasTree) {ierr = DMPlexAddClosure_Tree(dm, ht, closure[c], PETSC_TRUE, PETSC_TRUE);CHKERRQ(ierr);}
      }
    }
    if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure);CHKERRQ(ierr);}
#endif
  }
  /* Extract the elements, sort them, and hand ownership of the array to the IS */
  ierr = PetscHSetIGetSize(ht, &nelems);CHKERRQ(ierr);
  ierr = PetscMalloc1(nelems, &elems);CHKERRQ(ierr);
  ierr = PetscHSetIGetElems(ht, &off, elems);CHKERRQ(ierr);
  ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
  ierr = PetscSortInt(nelems, elems);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF, nelems, elems, PETSC_OWN_POINTER, closureIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2696 
2697 /*@
2698   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
2699 
2700   Input Parameters:
2701 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2703 
2704   Level: developer
2705 
2706 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2707 @*/
2708 PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
2709 {
2710   IS              rankIS,   pointIS, closureIS;
2711   const PetscInt *ranks,   *points;
2712   PetscInt        numRanks, numPoints, r;
2713   PetscErrorCode  ierr;
2714 
2715   PetscFunctionBegin;
2716   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2717   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2718   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2719   for (r = 0; r < numRanks; ++r) {
2720     const PetscInt rank = ranks[r];
2721     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2722     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2723     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2724     ierr = DMPlexClosurePoints_Private(dm, numPoints, points, &closureIS);CHKERRQ(ierr);
2725     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2726     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2727     ierr = DMLabelSetStratumIS(label, rank, closureIS);CHKERRQ(ierr);
2728     ierr = ISDestroy(&closureIS);CHKERRQ(ierr);
2729   }
2730   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2731   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2732   PetscFunctionReturn(0);
2733 }
2734 
2735 /*@
2736   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
2737 
2738   Input Parameters:
2739 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2741 
2742   Level: developer
2743 
2744 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2745 @*/
2746 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
2747 {
2748   IS              rankIS,   pointIS;
2749   const PetscInt *ranks,   *points;
2750   PetscInt        numRanks, numPoints, r, p, a, adjSize;
2751   PetscInt       *adj = NULL;
2752   PetscErrorCode  ierr;
2753 
2754   PetscFunctionBegin;
2755   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2756   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2757   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2758   for (r = 0; r < numRanks; ++r) {
2759     const PetscInt rank = ranks[r];
2760 
2761     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2762     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2763     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2764     for (p = 0; p < numPoints; ++p) {
2765       adjSize = PETSC_DETERMINE;
2766       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
2767       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
2768     }
2769     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2770     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2771   }
2772   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2773   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2774   ierr = PetscFree(adj);CHKERRQ(ierr);
2775   PetscFunctionReturn(0);
2776 }
2777 
2778 /*@
2779   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
2780 
2781   Input Parameters:
2782 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2784 
2785   Level: developer
2786 
2787   Note: This is required when generating multi-level overlaps to capture
2788   overlap points from non-neighbouring partitions.
2789 
2790 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2791 @*/
PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
{
  MPI_Comm        comm;
  PetscMPIInt     rank;
  PetscSF         sfPoint;
  DMLabel         lblRoots, lblLeaves;
  IS              rankIS, pointIS;
  const PetscInt *ranks;
  PetscInt        numRanks, r;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* Pull point contributions from remote leaves into local roots */
  ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt remoteRank = ranks[r];
    if (remoteRank == rank) continue; /* skip entries targeting this rank itself */
    ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
    ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
  /* Push point contributions from roots into remote leaves */
  ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt remoteRank = ranks[r];
    if (remoteRank == rank) continue; /* skip entries targeting this rank itself */
    ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
    ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2839 
2840 /*@
2841   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
2842 
2843   Input Parameters:
2844 + dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
2846 - processSF - A star forest mapping into the local index on each remote rank
2847 
2848   Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots
2850 
2851   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
2852   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
2853 
2854   Level: developer
2855 
2856 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2857 @*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size, r;
  PetscInt           p, n, numNeighbors, numPoints, dof, off, rootSize, l, nleaves, leafSize;
  PetscSF            sfPoint;
  PetscSection       rootSection;
  PetscSFNode       *rootPoints, *leafPoints;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscBool          mpiOverflow = PETSC_FALSE;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_PartLabelInvert,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners.
     rootSection is chartered over ranks [0, size): the dof of rank t is the number of
     label points destined for t, so the section lays out a rank-contiguous send buffer. */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &rootSize);CHKERRQ(ierr);
  ierr = PetscMalloc1(rootSize, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* A point that is a leaf in the point SF is not owned here: substitute the
         owner's (index, rank) from the SF graph; otherwise record it as our own. */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }

  /* Try to communicate overlap using All-to-All */
  if (!processSF) {
    PetscInt64  counter = 0;
    PetscBool   locOverflow = PETSC_FALSE;
    PetscMPIInt *scounts, *sdispls, *rcounts, *rdispls;

    ierr = PetscCalloc4(size, &scounts, size, &sdispls, size, &rcounts, size, &rdispls);CHKERRQ(ierr);
    for (n = 0; n < numNeighbors; ++n) {
      ierr = PetscSectionGetDof(rootSection, neighbors[n], &dof);CHKERRQ(ierr);
      ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
#if defined(PETSC_USE_64BIT_INDICES)
      /* Alltoallv takes int counts/displacements; with 64-bit PetscInt these may not fit */
      if (dof > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
      if (off > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
#endif
      scounts[neighbors[n]] = (PetscMPIInt) dof;
      sdispls[neighbors[n]] = (PetscMPIInt) off;
    }
    /* Exchange counts, then build receive displacements; total may overflow an int too */
    ierr = MPI_Alltoall(scounts, 1, MPI_INT, rcounts, 1, MPI_INT, comm);CHKERRQ(ierr);
    for (r = 0; r < size; ++r) { rdispls[r] = (int)counter; counter += rcounts[r]; }
    if (counter > PETSC_MPI_INT_MAX) locOverflow = PETSC_TRUE;
    /* Overflow decision must be collective so all ranks take the same branch below */
    ierr = MPI_Allreduce(&locOverflow, &mpiOverflow, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
    if (!mpiOverflow) {
      ierr = PetscInfo(dm,"Using Alltoallv for mesh distribution\n");CHKERRQ(ierr);
      leafSize = (PetscInt) counter;
      ierr = PetscMalloc1(leafSize, &leafPoints);CHKERRQ(ierr);
      ierr = MPI_Alltoallv(rootPoints, scounts, sdispls, MPIU_2INT, leafPoints, rcounts, rdispls, MPIU_2INT, comm);CHKERRQ(ierr);
    }
    ierr = PetscFree4(scounts, sdispls, rcounts, rdispls);CHKERRQ(ierr);
  }

  /* Communicate overlap using process star forest: either the caller supplied one,
     or we fall back to an all-to-all-patterned SF because of MPI int overflow above */
  if (processSF || mpiOverflow) {
    PetscSF      procSF;
    PetscSection leafSection;

    if (processSF) {
      ierr = PetscInfo(dm,"Using processSF for mesh distribution\n");CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)processSF);CHKERRQ(ierr);
      procSF = processSF;
    } else {
      ierr = PetscInfo(dm,"Using processSF for mesh distribution (MPI overflow)\n");CHKERRQ(ierr);
      ierr = PetscSFCreate(comm,&procSF);CHKERRQ(ierr);
      ierr = PetscSFSetGraphWithPattern(procSF,NULL,PETSCSF_PATTERN_ALLTOALL);CHKERRQ(ierr);
    }

    ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dm), &leafSection);CHKERRQ(ierr);
    ierr = DMPlexDistributeData(dm, procSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
    ierr = PetscSectionGetStorageSize(leafSection, &leafSize);CHKERRQ(ierr);
    ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&procSF);CHKERRQ(ierr);
  }

  /* Record the received (index, rank) pairs in the output label */
  for (p = 0; p < leafSize; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }

  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_PartLabelInvert,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2975 
2976 /*@
2977   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
2978 
2979   Input Parameters:
2980 + dm    - The DM
- label - DMLabel assigning ranks to remote roots
2982 
2983   Output Parameter:
2984 . sf    - The star forest communication context encapsulating the defined mapping
2985 
2986   Note: The incoming label is a receiver mapping of remote points to their parent rank.
2987 
2988   Level: developer
2989 
2990 .seealso: DMPlexDistribute(), DMPlexCreateOverlap()
2991 @*/
2992 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
2993 {
2994   PetscMPIInt     rank;
2995   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0, nNeighbors;
2996   PetscSFNode    *remotePoints;
2997   IS              remoteRootIS, neighborsIS;
2998   const PetscInt *remoteRoots, *neighbors;
2999   PetscErrorCode ierr;
3000 
3001   PetscFunctionBegin;
3002   ierr = PetscLogEventBegin(DMPLEX_PartLabelCreateSF,dm,0,0,0);CHKERRQ(ierr);
3003   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
3004 
3005   ierr = DMLabelGetValueIS(label, &neighborsIS);CHKERRQ(ierr);
3006 #if 0
3007   {
3008     IS is;
3009     ierr = ISDuplicate(neighborsIS, &is);CHKERRQ(ierr);
3010     ierr = ISSort(is);CHKERRQ(ierr);
3011     ierr = ISDestroy(&neighborsIS);CHKERRQ(ierr);
3012     neighborsIS = is;
3013   }
3014 #endif
3015   ierr = ISGetLocalSize(neighborsIS, &nNeighbors);CHKERRQ(ierr);
3016   ierr = ISGetIndices(neighborsIS, &neighbors);CHKERRQ(ierr);
3017   for (numRemote = 0, n = 0; n < nNeighbors; ++n) {
3018     ierr = DMLabelGetStratumSize(label, neighbors[n], &numPoints);CHKERRQ(ierr);
3019     numRemote += numPoints;
3020   }
3021   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
3022   /* Put owned points first */
3023   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
3024   if (numPoints > 0) {
3025     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
3026     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
3027     for (p = 0; p < numPoints; p++) {
3028       remotePoints[idx].index = remoteRoots[p];
3029       remotePoints[idx].rank = rank;
3030       idx++;
3031     }
3032     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
3033     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
3034   }
3035   /* Now add remote points */
3036   for (n = 0; n < nNeighbors; ++n) {
3037     const PetscInt nn = neighbors[n];
3038 
3039     ierr = DMLabelGetStratumSize(label, nn, &numPoints);CHKERRQ(ierr);
3040     if (nn == rank || numPoints <= 0) continue;
3041     ierr = DMLabelGetStratumIS(label, nn, &remoteRootIS);CHKERRQ(ierr);
3042     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
3043     for (p = 0; p < numPoints; p++) {
3044       remotePoints[idx].index = remoteRoots[p];
3045       remotePoints[idx].rank = nn;
3046       idx++;
3047     }
3048     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
3049     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
3050   }
3051   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
3052   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
3053   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
3054   ierr = PetscSFSetUp(*sf);CHKERRQ(ierr);
3055   ierr = ISDestroy(&neighborsIS);CHKERRQ(ierr);
3056   ierr = PetscLogEventEnd(DMPLEX_PartLabelCreateSF,dm,0,0,0);CHKERRQ(ierr);
3057   PetscFunctionReturn(0);
3058 }
3059 
3060 /* The two functions below are used by DMPlexRebalanceSharedPoints which errors
3061  * when PETSc is built without ParMETIS. To avoid -Wunused-function, we take
3062  * them out in that case. */
3063 #if defined(PETSC_HAVE_PARMETIS)
3064 /*@C
3065 
3066   DMPlexRewriteSF - Rewrites the ownership of the SF of a DM (in place).
3067 
3068   Input parameters:
3069 + dm                - The DMPlex object.
3070 . n                 - The number of points.
3071 . pointsToRewrite   - The points in the SF whose ownership will change.
3072 . targetOwners      - New owner for each element in pointsToRewrite.
3073 - degrees           - Degrees of the points in the SF as obtained by PetscSFComputeDegreeBegin/PetscSFComputeDegreeEnd.
3074 
3075   Level: developer
3076 
3077 @*/
3078 static PetscErrorCode DMPlexRewriteSF(DM dm, PetscInt n, PetscInt *pointsToRewrite, PetscInt *targetOwners, const PetscInt *degrees)
3079 {
3080   PetscInt      ierr, pStart, pEnd, i, j, counter, leafCounter, sumDegrees, nroots, nleafs;
3081   PetscInt     *cumSumDegrees, *newOwners, *newNumbers, *rankOnLeafs, *locationsOfLeafs, *remoteLocalPointOfLeafs, *points, *leafsNew;
3082   PetscSFNode  *leafLocationsNew;
3083   const         PetscSFNode *iremote;
3084   const         PetscInt *ilocal;
3085   PetscBool    *isLeaf;
3086   PetscSF       sf;
3087   MPI_Comm      comm;
3088   PetscMPIInt   rank, size;
3089 
3090   PetscFunctionBegin;
3091   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
3092   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
3093   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
3094   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
3095 
3096   ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
3097   ierr = PetscSFGetGraph(sf, &nroots, &nleafs, &ilocal, &iremote); CHKERRQ(ierr);
3098   ierr = PetscMalloc1(pEnd-pStart, &isLeaf);CHKERRQ(ierr);
3099   for (i=0; i<pEnd-pStart; i++) {
3100     isLeaf[i] = PETSC_FALSE;
3101   }
3102   for (i=0; i<nleafs; i++) {
3103     isLeaf[ilocal[i]-pStart] = PETSC_TRUE;
3104   }
3105 
3106   ierr = PetscMalloc1(pEnd-pStart+1, &cumSumDegrees);CHKERRQ(ierr);
3107   cumSumDegrees[0] = 0;
3108   for (i=1; i<=pEnd-pStart; i++) {
3109     cumSumDegrees[i] = cumSumDegrees[i-1] + degrees[i-1];
3110   }
3111   sumDegrees = cumSumDegrees[pEnd-pStart];
3112   /* get the location of my leafs (we have sumDegrees many leafs pointing at our roots) */
3113 
3114   ierr = PetscMalloc1(sumDegrees, &locationsOfLeafs);CHKERRQ(ierr);
3115   ierr = PetscMalloc1(pEnd-pStart, &rankOnLeafs);CHKERRQ(ierr);
3116   for (i=0; i<pEnd-pStart; i++) {
3117     rankOnLeafs[i] = rank;
3118   }
3119   ierr = PetscSFGatherBegin(sf, MPIU_INT, rankOnLeafs, locationsOfLeafs);CHKERRQ(ierr);
3120   ierr = PetscSFGatherEnd(sf, MPIU_INT, rankOnLeafs, locationsOfLeafs);CHKERRQ(ierr);
3121   ierr = PetscFree(rankOnLeafs);CHKERRQ(ierr);
3122 
3123   /* get the remote local points of my leaves */
3124   ierr = PetscMalloc1(sumDegrees, &remoteLocalPointOfLeafs);CHKERRQ(ierr);
3125   ierr = PetscMalloc1(pEnd-pStart, &points);CHKERRQ(ierr);
3126   for (i=0; i<pEnd-pStart; i++) {
3127     points[i] = pStart+i;
3128   }
3129   ierr = PetscSFGatherBegin(sf, MPIU_INT, points, remoteLocalPointOfLeafs);CHKERRQ(ierr);
3130   ierr = PetscSFGatherEnd(sf, MPIU_INT, points, remoteLocalPointOfLeafs);CHKERRQ(ierr);
3131   ierr = PetscFree(points);CHKERRQ(ierr);
3132   /* Figure out the new owners of the vertices that are up for grabs and their numbers on the new owners */
3133   ierr = PetscMalloc1(pEnd-pStart, &newOwners);CHKERRQ(ierr);
3134   ierr = PetscMalloc1(pEnd-pStart, &newNumbers);CHKERRQ(ierr);
3135   for (i=0; i<pEnd-pStart; i++) {
3136     newOwners[i] = -1;
3137     newNumbers[i] = -1;
3138   }
3139   {
3140     PetscInt oldNumber, newNumber, oldOwner, newOwner;
3141     for (i=0; i<n; i++) {
3142       oldNumber = pointsToRewrite[i];
3143       newNumber = -1;
3144       oldOwner = rank;
3145       newOwner = targetOwners[i];
3146       if (oldOwner == newOwner) {
3147         newNumber = oldNumber;
3148       } else {
3149         for (j=0; j<degrees[oldNumber]; j++) {
3150           if (locationsOfLeafs[cumSumDegrees[oldNumber]+j] == newOwner) {
3151             newNumber = remoteLocalPointOfLeafs[cumSumDegrees[oldNumber]+j];
3152             break;
3153           }
3154         }
3155       }
3156       if (newNumber == -1) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Couldn't find the new owner of vertex.");
3157 
3158       newOwners[oldNumber] = newOwner;
3159       newNumbers[oldNumber] = newNumber;
3160     }
3161   }
3162   ierr = PetscFree(cumSumDegrees);CHKERRQ(ierr);
3163   ierr = PetscFree(locationsOfLeafs);CHKERRQ(ierr);
3164   ierr = PetscFree(remoteLocalPointOfLeafs);CHKERRQ(ierr);
3165 
3166   ierr = PetscSFBcastBegin(sf, MPIU_INT, newOwners, newOwners);CHKERRQ(ierr);
3167   ierr = PetscSFBcastEnd(sf, MPIU_INT, newOwners, newOwners);CHKERRQ(ierr);
3168   ierr = PetscSFBcastBegin(sf, MPIU_INT, newNumbers, newNumbers);CHKERRQ(ierr);
3169   ierr = PetscSFBcastEnd(sf, MPIU_INT, newNumbers, newNumbers);CHKERRQ(ierr);
3170 
3171   /* Now count how many leafs we have on each processor. */
3172   leafCounter=0;
3173   for (i=0; i<pEnd-pStart; i++) {
3174     if (newOwners[i] >= 0) {
3175       if (newOwners[i] != rank) {
3176         leafCounter++;
3177       }
3178     } else {
3179       if (isLeaf[i]) {
3180         leafCounter++;
3181       }
3182     }
3183   }
3184 
3185   /* Now set up the new sf by creating the leaf arrays */
3186   ierr = PetscMalloc1(leafCounter, &leafsNew);CHKERRQ(ierr);
3187   ierr = PetscMalloc1(leafCounter, &leafLocationsNew);CHKERRQ(ierr);
3188 
3189   leafCounter = 0;
3190   counter = 0;
3191   for (i=0; i<pEnd-pStart; i++) {
3192     if (newOwners[i] >= 0) {
3193       if (newOwners[i] != rank) {
3194         leafsNew[leafCounter] = i;
3195         leafLocationsNew[leafCounter].rank = newOwners[i];
3196         leafLocationsNew[leafCounter].index = newNumbers[i];
3197         leafCounter++;
3198       }
3199     } else {
3200       if (isLeaf[i]) {
3201         leafsNew[leafCounter] = i;
3202         leafLocationsNew[leafCounter].rank = iremote[counter].rank;
3203         leafLocationsNew[leafCounter].index = iremote[counter].index;
3204         leafCounter++;
3205       }
3206     }
3207     if (isLeaf[i]) {
3208       counter++;
3209     }
3210   }
3211 
3212   ierr = PetscSFSetGraph(sf, nroots, leafCounter, leafsNew, PETSC_OWN_POINTER, leafLocationsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
3213   ierr = PetscFree(newOwners);CHKERRQ(ierr);
3214   ierr = PetscFree(newNumbers);CHKERRQ(ierr);
3215   ierr = PetscFree(isLeaf);CHKERRQ(ierr);
3216   PetscFunctionReturn(0);
3217 }
3218 
3219 static PetscErrorCode DMPlexViewDistribution(MPI_Comm comm, PetscInt n, PetscInt skip, PetscInt *vtxwgt, PetscInt *part, PetscViewer viewer)
3220 {
3221   PetscInt *distribution, min, max, sum, i, ierr;
3222   PetscMPIInt rank, size;
3223   PetscFunctionBegin;
3224   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
3225   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
3226   ierr = PetscCalloc1(size, &distribution);CHKERRQ(ierr);
3227   for (i=0; i<n; i++) {
3228     if (part) distribution[part[i]] += vtxwgt[skip*i];
3229     else distribution[rank] += vtxwgt[skip*i];
3230   }
3231   ierr = MPI_Allreduce(MPI_IN_PLACE, distribution, size, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
3232   min = distribution[0];
3233   max = distribution[0];
3234   sum = distribution[0];
3235   for (i=1; i<size; i++) {
3236     if (distribution[i]<min) min=distribution[i];
3237     if (distribution[i]>max) max=distribution[i];
3238     sum += distribution[i];
3239   }
3240   ierr = PetscViewerASCIIPrintf(viewer, "Min: %D, Avg: %D, Max: %D, Balance: %f\n", min, sum/size, max, (max*1.*size)/sum);CHKERRQ(ierr);
3241   ierr = PetscFree(distribution);CHKERRQ(ierr);
3242   PetscFunctionReturn(0);
3243 }
3244 
3245 #endif
3246 
3247 /*@
  DMPlexRebalanceSharedPoints - Redistribute points in the plex that are shared in order to achieve better balancing. This routine updates the PointSF of the DM in place.
3249 
3250   Input parameters:
3251 + dm               - The DMPlex object.
3252 . entityDepth      - depth of the entity to balance (0 -> balance vertices).
3253 . useInitialGuess  - whether to use the current distribution as initial guess (only used by ParMETIS).
3254 - parallel         - whether to use ParMETIS and do the partition in parallel or whether to gather the graph onto a single process and use METIS.
3255 
3256   Output parameters:
3257 . success          - whether the graph partitioning was successful or not. If not, try useInitialGuess=True and parallel=True.
3258 
3259   Level: intermediate
3260 
3261 @*/
3262 
3263 PetscErrorCode DMPlexRebalanceSharedPoints(DM dm, PetscInt entityDepth, PetscBool useInitialGuess, PetscBool parallel, PetscBool *success)
3264 {
3265 #if defined(PETSC_HAVE_PARMETIS)
3266   PetscSF     sf;
3267   PetscInt    ierr, i, j, idx, jdx;
3268   PetscInt    eBegin, eEnd, nroots, nleafs, pStart, pEnd;
3269   const       PetscInt *degrees, *ilocal;
3270   const       PetscSFNode *iremote;
3271   PetscBool   *toBalance, *isLeaf, *isExclusivelyOwned, *isNonExclusivelyOwned;
3272   PetscInt    numExclusivelyOwned, numNonExclusivelyOwned;
3273   PetscMPIInt rank, size;
3274   PetscInt    *globalNumbersOfLocalOwnedVertices, *leafGlobalNumbers;
3275   const       PetscInt *cumSumVertices;
3276   PetscInt    offset, counter;
3277   PetscInt    lenadjncy;
3278   PetscInt    *xadj, *adjncy, *vtxwgt;
3279   PetscInt    lenxadj;
3280   PetscInt    *adjwgt = NULL;
3281   PetscInt    *part, *options;
3282   PetscInt    nparts, wgtflag, numflag, ncon, edgecut;
3283   real_t      *ubvec;
3284   PetscInt    *firstVertices, *renumbering;
3285   PetscInt    failed, failedGlobal;
3286   MPI_Comm    comm;
3287   Mat         A, Apre;
3288   const char *prefix = NULL;
3289   PetscViewer       viewer;
3290   PetscViewerFormat format;
3291   PetscLayout layout;
3292 
3293   PetscFunctionBegin;
3294   if (success) *success = PETSC_FALSE;
3295   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
3296   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
3297   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
3298   if (size==1) PetscFunctionReturn(0);
3299 
3300   ierr = PetscLogEventBegin(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
3301 
3302   ierr = PetscOptionsGetViewer(comm,((PetscObject)dm)->options, prefix,"-dm_rebalance_partition_view",&viewer,&format,NULL);CHKERRQ(ierr);
3303   if (viewer) {
3304     ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr);
3305   }
3306 
3307   /* Figure out all points in the plex that we are interested in balancing. */
3308   ierr = DMPlexGetDepthStratum(dm, entityDepth, &eBegin, &eEnd);CHKERRQ(ierr);
3309   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
3310   ierr = PetscMalloc1(pEnd-pStart, &toBalance);CHKERRQ(ierr);
3311 
3312   for (i=0; i<pEnd-pStart; i++) {
3313     toBalance[i] = (PetscBool)(i-pStart>=eBegin && i-pStart<eEnd);
3314   }
3315 
3316   /* There are three types of points:
3317    * exclusivelyOwned: points that are owned by this process and only seen by this process
3318    * nonExclusivelyOwned: points that are owned by this process but seen by at least another process
3319    * leaf: a point that is seen by this process but owned by a different process
3320    */
3321   ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
3322   ierr = PetscSFGetGraph(sf, &nroots, &nleafs, &ilocal, &iremote); CHKERRQ(ierr);
3323   ierr = PetscMalloc1(pEnd-pStart, &isLeaf);CHKERRQ(ierr);
3324   ierr = PetscMalloc1(pEnd-pStart, &isNonExclusivelyOwned);CHKERRQ(ierr);
3325   ierr = PetscMalloc1(pEnd-pStart, &isExclusivelyOwned);CHKERRQ(ierr);
3326   for (i=0; i<pEnd-pStart; i++) {
3327     isNonExclusivelyOwned[i] = PETSC_FALSE;
3328     isExclusivelyOwned[i] = PETSC_FALSE;
3329     isLeaf[i] = PETSC_FALSE;
3330   }
3331 
3332   /* start by marking all the leafs */
3333   for (i=0; i<nleafs; i++) {
3334     isLeaf[ilocal[i]-pStart] = PETSC_TRUE;
3335   }
3336 
3337   /* for an owned point, we can figure out whether another processor sees it or
3338    * not by calculating its degree */
3339   ierr = PetscSFComputeDegreeBegin(sf, &degrees);CHKERRQ(ierr);
3340   ierr = PetscSFComputeDegreeEnd(sf, &degrees);CHKERRQ(ierr);
3341 
3342   numExclusivelyOwned = 0;
3343   numNonExclusivelyOwned = 0;
3344   for (i=0; i<pEnd-pStart; i++) {
3345     if (toBalance[i]) {
3346       if (degrees[i] > 0) {
3347         isNonExclusivelyOwned[i] = PETSC_TRUE;
3348         numNonExclusivelyOwned += 1;
3349       } else {
3350         if (!isLeaf[i]) {
3351           isExclusivelyOwned[i] = PETSC_TRUE;
3352           numExclusivelyOwned += 1;
3353         }
3354       }
3355     }
3356   }
3357 
3358   /* We are going to build a graph with one vertex per core representing the
3359    * exclusively owned points and then one vertex per nonExclusively owned
3360    * point. */
3361 
3362   ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
3363   ierr = PetscLayoutSetLocalSize(layout, 1 + numNonExclusivelyOwned);CHKERRQ(ierr);
3364   ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
3365   ierr = PetscLayoutGetRanges(layout, &cumSumVertices);CHKERRQ(ierr);
3366 
3367   ierr = PetscMalloc1(pEnd-pStart, &globalNumbersOfLocalOwnedVertices);CHKERRQ(ierr);
3368   for (i=0; i<pEnd-pStart; i++) {globalNumbersOfLocalOwnedVertices[i] = pStart - 1;}
3369   offset = cumSumVertices[rank];
3370   counter = 0;
3371   for (i=0; i<pEnd-pStart; i++) {
3372     if (toBalance[i]) {
3373       if (degrees[i] > 0) {
3374         globalNumbersOfLocalOwnedVertices[i] = counter + 1 + offset;
3375         counter++;
3376       }
3377     }
3378   }
3379 
3380   /* send the global numbers of vertices I own to the leafs so that they know to connect to it */
3381   ierr = PetscMalloc1(pEnd-pStart, &leafGlobalNumbers);CHKERRQ(ierr);
3382   ierr = PetscSFBcastBegin(sf, MPIU_INT, globalNumbersOfLocalOwnedVertices, leafGlobalNumbers);CHKERRQ(ierr);
3383   ierr = PetscSFBcastEnd(sf, MPIU_INT, globalNumbersOfLocalOwnedVertices, leafGlobalNumbers);CHKERRQ(ierr);
3384 
3385   /* Now start building the data structure for ParMETIS */
3386 
3387   ierr = MatCreate(comm, &Apre);CHKERRQ(ierr);
3388   ierr = MatSetType(Apre, MATPREALLOCATOR);CHKERRQ(ierr);
3389   ierr = MatSetSizes(Apre, 1+numNonExclusivelyOwned, 1+numNonExclusivelyOwned, cumSumVertices[size], cumSumVertices[size]);CHKERRQ(ierr);
3390   ierr = MatSetUp(Apre);CHKERRQ(ierr);
3391 
3392   for (i=0; i<pEnd-pStart; i++) {
3393     if (toBalance[i]) {
3394       idx = cumSumVertices[rank];
3395       if (isNonExclusivelyOwned[i]) jdx = globalNumbersOfLocalOwnedVertices[i];
3396       else if (isLeaf[i]) jdx = leafGlobalNumbers[i];
3397       else continue;
3398       ierr = MatSetValue(Apre, idx, jdx, 1, INSERT_VALUES);CHKERRQ(ierr);
3399       ierr = MatSetValue(Apre, jdx, idx, 1, INSERT_VALUES);CHKERRQ(ierr);
3400     }
3401   }
3402 
3403   ierr = MatAssemblyBegin(Apre, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3404   ierr = MatAssemblyEnd(Apre, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3405 
3406   ierr = MatCreate(comm, &A);CHKERRQ(ierr);
3407   ierr = MatSetType(A, MATMPIAIJ);CHKERRQ(ierr);
3408   ierr = MatSetSizes(A, 1+numNonExclusivelyOwned, 1+numNonExclusivelyOwned, cumSumVertices[size], cumSumVertices[size]);CHKERRQ(ierr);
3409   ierr = MatPreallocatorPreallocate(Apre, PETSC_FALSE, A);CHKERRQ(ierr);
3410   ierr = MatDestroy(&Apre);CHKERRQ(ierr);
3411 
3412   for (i=0; i<pEnd-pStart; i++) {
3413     if (toBalance[i]) {
3414       idx = cumSumVertices[rank];
3415       if (isNonExclusivelyOwned[i]) jdx = globalNumbersOfLocalOwnedVertices[i];
3416       else if (isLeaf[i]) jdx = leafGlobalNumbers[i];
3417       else continue;
3418       ierr = MatSetValue(A, idx, jdx, 1, INSERT_VALUES);CHKERRQ(ierr);
3419       ierr = MatSetValue(A, jdx, idx, 1, INSERT_VALUES);CHKERRQ(ierr);
3420     }
3421   }
3422 
3423   ierr = MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3424   ierr = MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3425   ierr = PetscFree(leafGlobalNumbers);CHKERRQ(ierr);
3426   ierr = PetscFree(globalNumbersOfLocalOwnedVertices);CHKERRQ(ierr);
3427 
3428   nparts = size;
3429   wgtflag = 2;
3430   numflag = 0;
3431   ncon = 2;
3432   real_t *tpwgts;
3433   ierr = PetscMalloc1(ncon * nparts, &tpwgts);CHKERRQ(ierr);
3434   for (i=0; i<ncon*nparts; i++) {
3435     tpwgts[i] = 1./(nparts);
3436   }
3437 
3438   ierr = PetscMalloc1(ncon, &ubvec);CHKERRQ(ierr);
3439   ubvec[0] = 1.01;
3440   ubvec[1] = 1.01;
3441   lenadjncy = 0;
3442   for (i=0; i<1+numNonExclusivelyOwned; i++) {
3443     PetscInt temp=0;
3444     ierr = MatGetRow(A, cumSumVertices[rank] + i, &temp, NULL, NULL);CHKERRQ(ierr);
3445     lenadjncy += temp;
3446     ierr = MatRestoreRow(A, cumSumVertices[rank] + i, &temp, NULL, NULL);CHKERRQ(ierr);
3447   }
3448   ierr = PetscMalloc1(lenadjncy, &adjncy);CHKERRQ(ierr);
3449   lenxadj = 2 + numNonExclusivelyOwned;
3450   ierr = PetscMalloc1(lenxadj, &xadj);CHKERRQ(ierr);
3451   xadj[0] = 0;
3452   counter = 0;
3453   for (i=0; i<1+numNonExclusivelyOwned; i++) {
3454     PetscInt        temp=0;
3455     const PetscInt *cols;
3456     ierr = MatGetRow(A, cumSumVertices[rank] + i, &temp, &cols, NULL);CHKERRQ(ierr);
3457     ierr = PetscArraycpy(&adjncy[counter], cols, temp);CHKERRQ(ierr);
3458     counter += temp;
3459     xadj[i+1] = counter;
3460     ierr = MatRestoreRow(A, cumSumVertices[rank] + i, &temp, &cols, NULL);CHKERRQ(ierr);
3461   }
3462 
3463   ierr = PetscMalloc1(cumSumVertices[rank+1]-cumSumVertices[rank], &part);CHKERRQ(ierr);
3464   ierr = PetscMalloc1(ncon*(1 + numNonExclusivelyOwned), &vtxwgt);CHKERRQ(ierr);
3465   vtxwgt[0] = numExclusivelyOwned;
3466   if (ncon>1) vtxwgt[1] = 1;
3467   for (i=0; i<numNonExclusivelyOwned; i++) {
3468     vtxwgt[ncon*(i+1)] = 1;
3469     if (ncon>1) vtxwgt[ncon*(i+1)+1] = 0;
3470   }
3471 
3472   if (viewer) {
3473     ierr = PetscViewerASCIIPrintf(viewer, "Attempt rebalancing of shared points of depth %D on interface of mesh distribution.\n", entityDepth);CHKERRQ(ierr);
3474     ierr = PetscViewerASCIIPrintf(viewer, "Size of generated auxiliary graph: %D\n", cumSumVertices[size]);CHKERRQ(ierr);
3475   }
3476   if (parallel) {
3477     ierr = PetscMalloc1(4, &options);CHKERRQ(ierr);
3478     options[0] = 1;
3479     options[1] = 0; /* Verbosity */
3480     options[2] = 0; /* Seed */
3481     options[3] = PARMETIS_PSR_COUPLED; /* Seed */
3482     if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using ParMETIS to partition graph.\n");CHKERRQ(ierr); }
3483     if (useInitialGuess) {
3484       if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using current distribution of points as initial guess.\n");CHKERRQ(ierr); }
3485       PetscStackPush("ParMETIS_V3_RefineKway");
3486       ierr = ParMETIS_V3_RefineKway((PetscInt*)cumSumVertices, xadj, adjncy, vtxwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgecut, part, &comm);
3487       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_RefineKway()");
3488       PetscStackPop;
3489     } else {
3490       PetscStackPush("ParMETIS_V3_PartKway");
3491       ierr = ParMETIS_V3_PartKway((PetscInt*)cumSumVertices, xadj, adjncy, vtxwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgecut, part, &comm);
3492       PetscStackPop;
3493       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
3494     }
3495     ierr = PetscFree(options);CHKERRQ(ierr);
3496   } else {
3497     if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using METIS to partition graph.\n");CHKERRQ(ierr); }
3498     Mat As;
3499     PetscInt numRows;
3500     PetscInt *partGlobal;
3501     ierr = MatCreateRedundantMatrix(A, size, MPI_COMM_NULL, MAT_INITIAL_MATRIX, &As);CHKERRQ(ierr);
3502 
3503     PetscInt *numExclusivelyOwnedAll;
3504     ierr = PetscMalloc1(size, &numExclusivelyOwnedAll);CHKERRQ(ierr);
3505     numExclusivelyOwnedAll[rank] = numExclusivelyOwned;
3506     ierr = MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,numExclusivelyOwnedAll,1,MPIU_INT,comm);CHKERRQ(ierr);
3507 
3508     ierr = MatGetSize(As, &numRows, NULL);CHKERRQ(ierr);
3509     ierr = PetscMalloc1(numRows, &partGlobal);CHKERRQ(ierr);
3510     if (!rank) {
3511       PetscInt *adjncy_g, *xadj_g, *vtxwgt_g;
3512       lenadjncy = 0;
3513 
3514       for (i=0; i<numRows; i++) {
3515         PetscInt temp=0;
3516         ierr = MatGetRow(As, i, &temp, NULL, NULL);CHKERRQ(ierr);
3517         lenadjncy += temp;
3518         ierr = MatRestoreRow(As, i, &temp, NULL, NULL);CHKERRQ(ierr);
3519       }
3520       ierr = PetscMalloc1(lenadjncy, &adjncy_g);CHKERRQ(ierr);
3521       lenxadj = 1 + numRows;
3522       ierr = PetscMalloc1(lenxadj, &xadj_g);CHKERRQ(ierr);
3523       xadj_g[0] = 0;
3524       counter = 0;
3525       for (i=0; i<numRows; i++) {
3526         PetscInt        temp=0;
3527         const PetscInt *cols;
3528         ierr = MatGetRow(As, i, &temp, &cols, NULL);CHKERRQ(ierr);
3529         ierr = PetscArraycpy(&adjncy_g[counter], cols, temp);CHKERRQ(ierr);
3530         counter += temp;
3531         xadj_g[i+1] = counter;
3532         ierr = MatRestoreRow(As, i, &temp, &cols, NULL);CHKERRQ(ierr);
3533       }
3534       ierr = PetscMalloc1(2*numRows, &vtxwgt_g);CHKERRQ(ierr);
3535       for (i=0; i<size; i++){
3536         vtxwgt_g[ncon*cumSumVertices[i]] = numExclusivelyOwnedAll[i];
3537         if (ncon>1) vtxwgt_g[ncon*cumSumVertices[i]+1] = 1;
3538         for (j=cumSumVertices[i]+1; j<cumSumVertices[i+1]; j++) {
3539           vtxwgt_g[ncon*j] = 1;
3540           if (ncon>1) vtxwgt_g[2*j+1] = 0;
3541         }
3542       }
3543       ierr = PetscMalloc1(64, &options);CHKERRQ(ierr);
3544       ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
3545       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
3546       options[METIS_OPTION_CONTIG] = 1;
3547       PetscStackPush("METIS_PartGraphKway");
3548       ierr = METIS_PartGraphKway(&numRows, &ncon, xadj_g, adjncy_g, vtxwgt_g, NULL, NULL, &nparts, tpwgts, ubvec, options, &edgecut, partGlobal);
3549       PetscStackPop;
3550       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
3551       ierr = PetscFree(options);CHKERRQ(ierr);
3552       ierr = PetscFree(xadj_g);CHKERRQ(ierr);
3553       ierr = PetscFree(adjncy_g);CHKERRQ(ierr);
3554       ierr = PetscFree(vtxwgt_g);CHKERRQ(ierr);
3555     }
3556     ierr = PetscFree(numExclusivelyOwnedAll);CHKERRQ(ierr);
3557 
3558     /* Now scatter the parts array. */
3559     {
3560       PetscMPIInt *counts, *mpiCumSumVertices;
3561       ierr = PetscMalloc1(size, &counts);CHKERRQ(ierr);
3562       ierr = PetscMalloc1(size+1, &mpiCumSumVertices);CHKERRQ(ierr);
3563       for(i=0; i<size; i++) {
3564         ierr = PetscMPIIntCast(cumSumVertices[i+1] - cumSumVertices[i], &(counts[i]));CHKERRQ(ierr);
3565       }
3566       for(i=0; i<=size; i++) {
3567         ierr = PetscMPIIntCast(cumSumVertices[i], &(mpiCumSumVertices[i]));CHKERRQ(ierr);
3568       }
3569       ierr = MPI_Scatterv(partGlobal, counts, mpiCumSumVertices, MPIU_INT, part, counts[rank], MPIU_INT, 0, comm);CHKERRQ(ierr);
3570       ierr = PetscFree(counts);CHKERRQ(ierr);
3571       ierr = PetscFree(mpiCumSumVertices);CHKERRQ(ierr);
3572     }
3573 
3574     ierr = PetscFree(partGlobal);CHKERRQ(ierr);
3575     ierr = MatDestroy(&As);CHKERRQ(ierr);
3576   }
3577 
3578   ierr = MatDestroy(&A);CHKERRQ(ierr);
3579   ierr = PetscFree(ubvec);CHKERRQ(ierr);
3580   ierr = PetscFree(tpwgts);CHKERRQ(ierr);
3581 
3582   /* Now rename the result so that the vertex resembling the exclusively owned points stays on the same rank */
3583 
3584   ierr = PetscMalloc1(size, &firstVertices);CHKERRQ(ierr);
3585   ierr = PetscMalloc1(size, &renumbering);CHKERRQ(ierr);
3586   firstVertices[rank] = part[0];
3587   ierr = MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,firstVertices,1,MPIU_INT,comm);CHKERRQ(ierr);
3588   for (i=0; i<size; i++) {
3589     renumbering[firstVertices[i]] = i;
3590   }
3591   for (i=0; i<cumSumVertices[rank+1]-cumSumVertices[rank]; i++) {
3592     part[i] = renumbering[part[i]];
3593   }
3594   /* Check if the renumbering worked (this can fail when ParMETIS gives fewer partitions than there are processes) */
3595   failed = (PetscInt)(part[0] != rank);
3596   ierr = MPI_Allreduce(&failed, &failedGlobal, 1, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
3597 
3598   ierr = PetscFree(firstVertices);CHKERRQ(ierr);
3599   ierr = PetscFree(renumbering);CHKERRQ(ierr);
3600 
3601   if (failedGlobal > 0) {
3602     ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
3603     ierr = PetscFree(xadj);CHKERRQ(ierr);
3604     ierr = PetscFree(adjncy);CHKERRQ(ierr);
3605     ierr = PetscFree(vtxwgt);CHKERRQ(ierr);
3606     ierr = PetscFree(toBalance);CHKERRQ(ierr);
3607     ierr = PetscFree(isLeaf);CHKERRQ(ierr);
3608     ierr = PetscFree(isNonExclusivelyOwned);CHKERRQ(ierr);
3609     ierr = PetscFree(isExclusivelyOwned);CHKERRQ(ierr);
3610     ierr = PetscFree(part);CHKERRQ(ierr);
3611     if (viewer) {
3612       ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
3613       ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
3614     }
3615     ierr = PetscLogEventEnd(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
3616     PetscFunctionReturn(0);
3617   }
3618 
3619   /*Let's check how well we did distributing points*/
3620   if (viewer) {
3621     ierr = PetscViewerASCIIPrintf(viewer, "Comparing number of owned entities of depth %D on each process before rebalancing, after rebalancing, and after consistency checks.\n", entityDepth);CHKERRQ(ierr);
3622     ierr = PetscViewerASCIIPrintf(viewer, "Initial.     ");CHKERRQ(ierr);
3623     ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, NULL, viewer);CHKERRQ(ierr);
3624     ierr = PetscViewerASCIIPrintf(viewer, "Rebalanced.  ");CHKERRQ(ierr);
3625     ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, part, viewer);CHKERRQ(ierr);
3626   }
3627 
3628   /* Now check that every vertex is owned by a process that it is actually connected to. */
3629   for (i=1; i<=numNonExclusivelyOwned; i++) {
3630     PetscInt loc = 0;
3631     ierr = PetscFindInt(cumSumVertices[part[i]], xadj[i+1]-xadj[i], &adjncy[xadj[i]], &loc);CHKERRQ(ierr);
3632     /* If not, then just set the owner to the original owner (hopefully a rare event, it means that a vertex has been isolated) */
3633     if (loc<0) {
3634       part[i] = rank;
3635     }
3636   }
3637 
3638   /* Let's see how significant the influences of the previous fixing up step was.*/
3639   if (viewer) {
3640     ierr = PetscViewerASCIIPrintf(viewer, "After.       ");CHKERRQ(ierr);
3641     ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, part, viewer);CHKERRQ(ierr);
3642   }
3643 
3644   ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
3645   ierr = PetscFree(xadj);CHKERRQ(ierr);
3646   ierr = PetscFree(adjncy);CHKERRQ(ierr);
3647   ierr = PetscFree(vtxwgt);CHKERRQ(ierr);
3648 
3649   /* Almost done, now rewrite the SF to reflect the new ownership. */
3650   {
3651     PetscInt *pointsToRewrite;
3652     ierr = PetscMalloc1(numNonExclusivelyOwned, &pointsToRewrite);CHKERRQ(ierr);
3653     counter = 0;
3654     for(i=0; i<pEnd-pStart; i++) {
3655       if (toBalance[i]) {
3656         if (isNonExclusivelyOwned[i]) {
3657           pointsToRewrite[counter] = i + pStart;
3658           counter++;
3659         }
3660       }
3661     }
3662     ierr = DMPlexRewriteSF(dm, numNonExclusivelyOwned, pointsToRewrite, part+1, degrees);CHKERRQ(ierr);
3663     ierr = PetscFree(pointsToRewrite);CHKERRQ(ierr);
3664   }
3665 
3666   ierr = PetscFree(toBalance);CHKERRQ(ierr);
3667   ierr = PetscFree(isLeaf);CHKERRQ(ierr);
3668   ierr = PetscFree(isNonExclusivelyOwned);CHKERRQ(ierr);
3669   ierr = PetscFree(isExclusivelyOwned);CHKERRQ(ierr);
3670   ierr = PetscFree(part);CHKERRQ(ierr);
3671   if (viewer) {
3672     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
3673     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
3674   }
3675   if (success) *success = PETSC_TRUE;
3676   ierr = PetscLogEventEnd(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
3677 #else
3678   SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
3679 #endif
3680   PetscFunctionReturn(0);
3681 }
3682