xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 2c7c0729d89b910088ee906aed2239ea927d033e)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/hashseti.h>
3 
/* Class id under which PetscPartitioner objects are registered with the PETSc object system */
PetscClassId PETSCPARTITIONER_CLASSID = 0;

/* Registry of partitioner implementations (filled by PetscPartitionerRegister()) and
   a guard flag so PetscPartitionerRegisterAll() runs only once */
PetscFunctionList PetscPartitionerList              = NULL;
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;

/* Set to PETSC_TRUE once the Chaco citation below has been registered */
PetscBool ChacoPartitionercite = PETSC_FALSE;
/* BibTeX entry for the Chaco partitioner.  Fix: the booktitle line was missing its
   trailing "\n", which glued the booktitle and isbn fields onto one output line. */
const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
                               "  author    = {Bruce Hendrickson and Robert Leland},\n"
                               "  title     = {A multilevel algorithm for partitioning graphs},\n"
                               "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},\n"
                               "  isbn      = {0-89791-816-9},\n"
                               "  pages     = {28},\n"
                               "  doi       = {https://doi.acm.org/10.1145/224170.224228},\n"
                               "  publisher = {ACM Press},\n"
                               "  address   = {New York},\n"
                               "  year      = {1995}\n}\n";
20 
/* Set to PETSC_TRUE once the ParMetis citation below has been registered */
PetscBool ParMetisPartitionercite = PETSC_FALSE;
/* BibTeX entry for the ParMetis partitioner */
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";

/* Set to PETSC_TRUE once the PT-Scotch citation below has been registered */
PetscBool PTScotchPartitionercite = PETSC_FALSE;
/* BibTeX entry for the PT-Scotch partitioner */
const char PTScotchPartitionerCitation[] =
  "@article{PTSCOTCH,\n"
  "  author  = {C. Chevalier and F. Pellegrini},\n"
  "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
  "  journal = {Parallel Computing},\n"
  "  volume  = {34},\n"
  "  number  = {6},\n"
  "  pages   = {318--331},\n"
  "  year    = {2008},\n"
  "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
  "}\n";
42 
43 
44 PETSC_STATIC_INLINE PetscInt DMPlex_GlobalID(PetscInt point) { return point >= 0 ? point : -(point+1); }
45 
/* Build the cell-cell adjacency graph (CSR) used by the partitioners by walking the
   Plex directly ("native" path).  Local adjacency comes from DMPlexGetAdjacency() with
   FVM (cone, no closure) adjacency; cross-process adjacency is recovered by exchanging,
   over the point SF, the global id of the cell on the far side of each shared face. */
static PetscErrorCode DMPlexCreatePartitionerGraph_Native(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
{
  PetscInt       dim, depth, p, pStart, pEnd, a, adjSize, idx, size;
  PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
  IS             cellNumbering;
  const PetscInt *cellNum;
  PetscBool      useCone, useClosure;
  PetscSection   section;
  PetscSegBuffer adjBuffer;
  PetscSF        sfPoint;
  PetscInt       *adjCells = NULL, *remoteCells = NULL;
  const PetscInt *local;
  PetscInt       nroots, nleaves, l;
  PetscMPIInt    rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  if (dim != depth) {
    /* We do not handle the uninterpolated case here */
    ierr = DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);CHKERRQ(ierr);
    /* DMPlexCreateNeighborCSR does not make a numbering */
    if (globalNumbering) {ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);CHKERRQ(ierr);}
    /* Different behavior for empty graphs */
    if (!*numVertices) {
      ierr = PetscMalloc1(1, offsets);CHKERRQ(ierr);
      (*offsets)[0] = 0;
    }
    /* Broken in parallel */
    if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported");
    PetscFunctionReturn(0);
  }
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
  /* Build adjacency graph via a section/segbuffer */
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
  /* Always use FVM adjacency to create partitioner graph; original setting is restored below */
  ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
  ierr = DMSetBasicAdjacency(dm, PETSC_TRUE, PETSC_FALSE);CHKERRQ(ierr);
  ierr = DMPlexCreateNumbering_Plex(dm, pStart, pEnd, 0, NULL, sfPoint, &cellNumbering);CHKERRQ(ierr);
  if (globalNumbering) {
    /* Hand a reference to the caller; we keep using cellNumbering locally */
    ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
    *globalNumbering = cellNumbering;
  }
  ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  /* For all boundary faces (including faces adjacent to a ghost cell), record the local cell in adjCells
     Broadcast adjCells to remoteCells (to get cells from roots) and Reduce adjCells to remoteCells (to get cells from leaves)
   */
  ierr = PetscSFGetGraph(sfPoint, &nroots, &nleaves, &local, NULL);CHKERRQ(ierr);
  if (nroots >= 0) {
    /* nroots >= 0 means the point SF has been set up, i.e. we are (potentially) parallel */
    PetscInt fStart, fEnd, f;

    ierr = PetscCalloc2(nroots, &adjCells, nroots, &remoteCells);CHKERRQ(ierr);
    ierr = DMPlexGetHeightStratum(dm, height+1, &fStart, &fEnd);CHKERRQ(ierr);
    /* -3 is a sentinel: no adjacent cell recorded for this point yet */
    for (l = 0; l < nroots; ++l) adjCells[l] = -3;
    for (f = fStart; f < fEnd; ++f) {
      const PetscInt *support;
      PetscInt        supportSize;

      ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
      ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
      if (supportSize == 1) adjCells[f] = DMPlex_GlobalID(cellNum[support[0]]);
      else if (supportSize == 2) {
        /* If one support cell is an SF leaf (owned elsewhere), record the OTHER cell's global id */
        ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = DMPlex_GlobalID(cellNum[support[1]]);
        ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = DMPlex_GlobalID(cellNum[support[0]]);
      }
      /* Handle non-conforming meshes */
      if (supportSize > 2) {
        PetscInt        numChildren, i;
        const PetscInt *children;

        ierr = DMPlexGetTreeChildren(dm, f, &numChildren, &children);CHKERRQ(ierr);
        for (i = 0; i < numChildren; ++i) {
          const PetscInt child = children[i];
          if (fStart <= child && child < fEnd) {
            ierr = DMPlexGetSupport(dm, child, &support);CHKERRQ(ierr);
            ierr = DMPlexGetSupportSize(dm, child, &supportSize);CHKERRQ(ierr);
            if (supportSize == 1) adjCells[child] = DMPlex_GlobalID(cellNum[support[0]]);
            else if (supportSize == 2) {
              ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr);
              if (p >= 0) adjCells[child] = DMPlex_GlobalID(cellNum[support[1]]);
              ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr);
              if (p >= 0) adjCells[child] = DMPlex_GlobalID(cellNum[support[0]]);
            }
          }
        }
      }
    }
    for (l = 0; l < nroots; ++l) remoteCells[l] = -1;
    /* Bcast fills leaves from roots; the MPI_MAX reduce then fills roots from leaves
       (valid ids are >= 0, so MAX prefers a real id over the -1/-3 sentinels) */
    ierr = PetscSFBcastBegin(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
  }
  /* Combine local and global adjacencies */
  for (*numVertices = 0, p = pStart; p < pEnd; p++) {
    /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    /* Add remote cells */
    if (remoteCells) {
      const PetscInt gp = DMPlex_GlobalID(cellNum[p]);
      PetscInt       coneSize, numChildren, c, i;
      const PetscInt *cone, *children;

      ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
      ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
      for (c = 0; c < coneSize; ++c) {
        const PetscInt point = cone[c];
        /* A valid remote id different from this cell's own id is a cross-process neighbor */
        if (remoteCells[point] >= 0 && remoteCells[point] != gp) {
          PetscInt *PETSC_RESTRICT pBuf;
          ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
          ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
          *pBuf = remoteCells[point];
        }
        /* Handle non-conforming meshes */
        ierr = DMPlexGetTreeChildren(dm, point, &numChildren, &children);CHKERRQ(ierr);
        for (i = 0; i < numChildren; ++i) {
          const PetscInt child = children[i];
          if (remoteCells[child] >= 0 && remoteCells[child] != gp) {
            PetscInt *PETSC_RESTRICT pBuf;
            ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
            ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
            *pBuf = remoteCells[child];
          }
        }
      }
    }
    /* Add local cells */
    adjSize = PETSC_DETERMINE;
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      const PetscInt point = adj[a];
      if (point != p && pStart <= point && point < pEnd) {
        PetscInt *PETSC_RESTRICT pBuf;
        ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
        ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
        *pBuf = DMPlex_GlobalID(cellNum[point]);
      }
    }
    (*numVertices)++;
  }
  ierr = PetscFree(adj);CHKERRQ(ierr);
  ierr = PetscFree2(adjCells, remoteCells);CHKERRQ(ierr);
  /* Restore the user's adjacency setting saved above */
  ierr = DMSetBasicAdjacency(dm, useCone, useClosure);CHKERRQ(ierr);

  /* Derive CSR graph from section/segbuffer */
  ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
  for (idx = 0, p = pStart; p < pEnd; p++) {
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
  }
  vOffsets[*numVertices] = size;
  ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);

  if (nroots >= 0) {
    /* Filter out duplicate edges using section/segbuffer
       (the same neighbor can be recorded via both the local and the remote pass) */
    ierr = PetscSectionReset(section);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(section, 0, *numVertices);CHKERRQ(ierr);
    for (p = 0; p < *numVertices; p++) {
      PetscInt start = vOffsets[p], end = vOffsets[p+1];
      PetscInt numEdges = end-start, *PETSC_RESTRICT edges;
      ierr = PetscSortRemoveDupsInt(&numEdges, &graph[start]);CHKERRQ(ierr);
      ierr = PetscSectionSetDof(section, p, numEdges);CHKERRQ(ierr);
      ierr = PetscSegBufferGetInts(adjBuffer, numEdges, &edges);CHKERRQ(ierr);
      ierr = PetscArraycpy(edges, &graph[start], numEdges);CHKERRQ(ierr);
    }
    ierr = PetscFree(vOffsets);CHKERRQ(ierr);
    ierr = PetscFree(graph);CHKERRQ(ierr);
    /* Derive CSR graph from section/segbuffer */
    ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
    ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
    ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
    for (idx = 0, p = 0; p < *numVertices; p++) {
      ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
    }
    vOffsets[*numVertices] = size;
    ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
  } else {
    /* Sort adjacencies (not strictly necessary) */
    for (p = 0; p < *numVertices; p++) {
      PetscInt start = vOffsets[p], end = vOffsets[p+1];
      ierr = PetscSortInt(end-start, &graph[start]);CHKERRQ(ierr);
    }
  }

  /* Ownership of vOffsets and graph transfers to the caller (freed with PetscFree()) */
  if (offsets) *offsets = vOffsets;
  if (adjacency) *adjacency = graph;

  /* Cleanup */
  ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
  ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
249 
/* Build the cell-cell adjacency graph (CSR) used by the partitioners via matrix-matrix
   products: assemble the face-cell incidence matrix conn and form conn^T * conn, whose
   off-diagonal entries mark cells sharing a face.  Selected by -dm_plex_csr_via_mat. */
static PetscErrorCode DMPlexCreatePartitionerGraph_ViaMat(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
{
  Mat            conn, CSR;
  IS             fis, cis, cis_own;
  PetscSF        sfPoint;
  const PetscInt *rows, *cols, *ii, *jj;
  PetscInt       *idxs,*idxs2;
  PetscInt       dim, depth, floc, cloc, i, M, N, c, lm, m, cStart, cEnd, fStart, fEnd;
  PetscMPIInt    rank;
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  if (dim != depth) {
    /* We do not handle the uninterpolated case here */
    ierr = DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);CHKERRQ(ierr);
    /* DMPlexCreateNeighborCSR does not make a numbering */
    if (globalNumbering) {ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);CHKERRQ(ierr);}
    /* Different behavior for empty graphs */
    if (!*numVertices) {
      ierr = PetscMalloc1(1, offsets);CHKERRQ(ierr);
      (*offsets)[0] = 0;
    }
    /* Broken in parallel */
    if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported");
    PetscFunctionReturn(0);
  }
  /* Interpolated and parallel case */

  /* numbering: global numberings of cells (cis) and faces (fis); negative entries mark
     points owned by another process (encoded as -(global+1)) */
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &cStart, &cEnd);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height+1, &fStart, &fEnd);CHKERRQ(ierr);
  ierr = DMPlexCreateNumbering_Plex(dm, cStart, cEnd, 0, &N, sfPoint, &cis);CHKERRQ(ierr);
  ierr = DMPlexCreateNumbering_Plex(dm, fStart, fEnd, 0, &M, sfPoint, &fis);CHKERRQ(ierr);
  if (globalNumbering) {
    ierr = ISDuplicate(cis, globalNumbering);CHKERRQ(ierr);
  }

  /* get positive global ids and local sizes for facets and cells */
  ierr = ISGetLocalSize(fis, &m);CHKERRQ(ierr);
  ierr = ISGetIndices(fis, &rows);CHKERRQ(ierr);
  ierr = PetscMalloc1(m, &idxs);CHKERRQ(ierr);
  for (i = 0, floc = 0; i < m; i++) {
    const PetscInt p = rows[i];

    if (p < 0) {
      /* Decode the flipped id of a remotely-owned face */
      idxs[i] = -(p+1);
    } else {
      idxs[i] = p;
      floc   += 1; /* count locally-owned faces */
    }
  }
  ierr = ISRestoreIndices(fis, &rows);CHKERRQ(ierr);
  ierr = ISDestroy(&fis);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF, m, idxs, PETSC_OWN_POINTER, &fis);CHKERRQ(ierr);

  ierr = ISGetLocalSize(cis, &m);CHKERRQ(ierr);
  ierr = ISGetIndices(cis, &cols);CHKERRQ(ierr);
  ierr = PetscMalloc1(m, &idxs);CHKERRQ(ierr);
  ierr = PetscMalloc1(m, &idxs2);CHKERRQ(ierr);
  for (i = 0, cloc = 0; i < m; i++) {
    const PetscInt p = cols[i];

    if (p < 0) {
      idxs[i] = -(p+1);
    } else {
      idxs[i]       = p;
      idxs2[cloc++] = p; /* idxs2 keeps only locally-owned cell ids */
    }
  }
  ierr = ISRestoreIndices(cis, &cols);CHKERRQ(ierr);
  ierr = ISDestroy(&cis);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), m, idxs, PETSC_OWN_POINTER, &cis);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), cloc, idxs2, PETSC_OWN_POINTER, &cis_own);CHKERRQ(ierr);

  /* Create matrix to hold F-C connectivity (MatMatTranspose Mult not supported for MPIAIJ) */
  ierr = MatCreate(PetscObjectComm((PetscObject)dm), &conn);CHKERRQ(ierr);
  ierr = MatSetSizes(conn, floc, cloc, M, N);CHKERRQ(ierr);
  ierr = MatSetType(conn, MATMPIAIJ);CHKERRQ(ierr);
  ierr = DMPlexGetMaxSizes(dm, NULL, &lm);CHKERRQ(ierr);
  /* Sum of per-rank maximum sizes gives a (loose) upper bound for preallocation */
  ierr = MPI_Allreduce(&lm, &m, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject) dm));CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(conn, m, NULL, m, NULL);CHKERRQ(ierr);

  /* Assemble matrix: entry (f, c) = 1 iff face f is in the cone of cell c */
  ierr = ISGetIndices(fis, &rows);CHKERRQ(ierr);
  ierr = ISGetIndices(cis, &cols);CHKERRQ(ierr);
  for (c = cStart; c < cEnd; c++) {
    const PetscInt *cone;
    PetscInt        coneSize, row, col, f;

    col  = cols[c-cStart];
    ierr = DMPlexGetCone(dm, c, &cone);CHKERRQ(ierr);
    ierr = DMPlexGetConeSize(dm, c, &coneSize);CHKERRQ(ierr);
    for (f = 0; f < coneSize; f++) {
      const PetscScalar v = 1.0;
      const PetscInt *children;
      PetscInt        numChildren, ch;

      row  = rows[cone[f]-fStart];
      ierr = MatSetValues(conn, 1, &row, 1, &col, &v, INSERT_VALUES);CHKERRQ(ierr);

      /* non-conforming meshes */
      ierr = DMPlexGetTreeChildren(dm, cone[f], &numChildren, &children);CHKERRQ(ierr);
      for (ch = 0; ch < numChildren; ch++) {
        const PetscInt child = children[ch];

        if (child < fStart || child >= fEnd) continue;
        row  = rows[child-fStart];
        ierr = MatSetValues(conn, 1, &row, 1, &col, &v, INSERT_VALUES);CHKERRQ(ierr);
      }
    }
  }
  ierr = ISRestoreIndices(fis, &rows);CHKERRQ(ierr);
  ierr = ISRestoreIndices(cis, &cols);CHKERRQ(ierr);
  ierr = ISDestroy(&fis);CHKERRQ(ierr);
  ierr = ISDestroy(&cis);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(conn, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(conn, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  /* Get parallel CSR by doing conn^T * conn */
  ierr = MatTransposeMatMult(conn, conn, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &CSR);CHKERRQ(ierr);
  ierr = MatDestroy(&conn);CHKERRQ(ierr);

  /* extract local part of the CSR */
  ierr = MatMPIAIJGetLocalMat(CSR, MAT_INITIAL_MATRIX, &conn);CHKERRQ(ierr);
  ierr = MatDestroy(&CSR);CHKERRQ(ierr);
  ierr = MatGetRowIJ(conn, 0, PETSC_FALSE, PETSC_FALSE, &m, &ii, &jj, &flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "No IJ format");

  /* get back requested output; offsets/adjacency ownership transfers to the caller */
  if (numVertices) *numVertices = m;
  if (offsets) {
    ierr = PetscCalloc1(m+1, &idxs);CHKERRQ(ierr);
    for (i = 1; i < m+1; i++) idxs[i] = ii[i] - i; /* ParMetis does not like self-connectivity */
    *offsets = idxs;
  }
  if (adjacency) {
    ierr = PetscMalloc1(ii[m] - m, &idxs);CHKERRQ(ierr);
    ierr = ISGetIndices(cis_own, &rows);CHKERRQ(ierr);
    for (i = 0, c = 0; i < m; i++) {
      PetscInt j, g = rows[i]; /* g = global id of row cell, used to drop the diagonal */

      for (j = ii[i]; j < ii[i+1]; j++) {
        if (jj[j] == g) continue; /* again, self-connectivity */
        idxs[c++] = jj[j];
      }
    }
    if (c != ii[m] - m) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected %D != %D",c,ii[m]-m);
    ierr = ISRestoreIndices(cis_own, &rows);CHKERRQ(ierr);
    *adjacency = idxs;
  }

  /* cleanup */
  ierr = ISDestroy(&cis_own);CHKERRQ(ierr);
  ierr = MatRestoreRowIJ(conn, 0, PETSC_FALSE, PETSC_FALSE, &m, &ii, &jj, &flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "No IJ format");
  ierr = MatDestroy(&conn);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
413 
414 /*@C
415   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
416 
417   Input Parameters:
418 + dm      - The mesh DM dm
419 - height  - Height of the strata from which to construct the graph
420 
421   Output Parameter:
422 + numVertices     - Number of vertices in the graph
423 . offsets         - Point offsets in the graph
424 . adjacency       - Point connectivity in the graph
425 - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.
426 
427   The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
428   representation on the mesh. If requested, globalNumbering needs to be destroyed by the caller; offsets and adjacency need to be freed with PetscFree().
429 
430   Level: developer
431 
432 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMSetAdjacency()
433 @*/
434 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
435 {
436   PetscErrorCode ierr;
437   PetscBool      usemat = PETSC_FALSE;
438 
439   PetscFunctionBegin;
440   ierr = PetscOptionsGetBool(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-dm_plex_csr_via_mat", &usemat, NULL);CHKERRQ(ierr);
441   if (usemat) {
442     ierr = DMPlexCreatePartitionerGraph_ViaMat(dm, height, numVertices, offsets, adjacency, globalNumbering);CHKERRQ(ierr);
443   } else {
444     ierr = DMPlexCreatePartitionerGraph_Native(dm, height, numVertices, offsets, adjacency, globalNumbering);CHKERRQ(ierr);
445   }
446   PetscFunctionReturn(0);
447 }
448 
449 /*@C
450   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
451 
452   Collective on DM
453 
454   Input Arguments:
455 + dm - The DMPlex
456 - cellHeight - The height of mesh points to treat as cells (default should be 0)
457 
458   Output Arguments:
459 + numVertices - The number of local vertices in the graph, or cells in the mesh.
460 . offsets     - The offset to the adjacency list for each cell
461 - adjacency   - The adjacency list for all cells
462 
463   Note: This is suitable for input to a mesh partitioner like ParMetis.
464 
465   Level: advanced
466 
467 .seealso: DMPlexCreate()
468 @*/
469 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
470 {
471   const PetscInt maxFaceCases = 30;
472   PetscInt       numFaceCases = 0;
473   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
474   PetscInt      *off, *adj;
475   PetscInt      *neighborCells = NULL;
476   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
477   PetscErrorCode ierr;
478 
479   PetscFunctionBegin;
480   /* For parallel partitioning, I think you have to communicate supports */
481   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
482   cellDim = dim - cellHeight;
483   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
484   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
485   if (cEnd - cStart == 0) {
486     if (numVertices) *numVertices = 0;
487     if (offsets)   *offsets   = NULL;
488     if (adjacency) *adjacency = NULL;
489     PetscFunctionReturn(0);
490   }
491   numCells  = cEnd - cStart;
492   faceDepth = depth - cellHeight;
493   if (dim == depth) {
494     PetscInt f, fStart, fEnd;
495 
496     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
497     /* Count neighboring cells */
498     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
499     for (f = fStart; f < fEnd; ++f) {
500       const PetscInt *support;
501       PetscInt        supportSize;
502       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
503       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
504       if (supportSize == 2) {
505         PetscInt numChildren;
506 
507         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
508         if (!numChildren) {
509           ++off[support[0]-cStart+1];
510           ++off[support[1]-cStart+1];
511         }
512       }
513     }
514     /* Prefix sum */
515     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
516     if (adjacency) {
517       PetscInt *tmp;
518 
519       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
520       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
521       ierr = PetscArraycpy(tmp, off, numCells+1);CHKERRQ(ierr);
522       /* Get neighboring cells */
523       for (f = fStart; f < fEnd; ++f) {
524         const PetscInt *support;
525         PetscInt        supportSize;
526         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
527         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
528         if (supportSize == 2) {
529           PetscInt numChildren;
530 
531           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
532           if (!numChildren) {
533             adj[tmp[support[0]-cStart]++] = support[1];
534             adj[tmp[support[1]-cStart]++] = support[0];
535           }
536         }
537       }
538       if (PetscDefined(USE_DEBUG)) {
539         for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
540       }
541       ierr = PetscFree(tmp);CHKERRQ(ierr);
542     }
543     if (numVertices) *numVertices = numCells;
544     if (offsets)   *offsets   = off;
545     if (adjacency) *adjacency = adj;
546     PetscFunctionReturn(0);
547   }
548   /* Setup face recognition */
549   if (faceDepth == 1) {
550     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
551 
552     for (c = cStart; c < cEnd; ++c) {
553       PetscInt corners;
554 
555       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
556       if (!cornersSeen[corners]) {
557         PetscInt nFV;
558 
559         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
560         cornersSeen[corners] = 1;
561 
562         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
563 
564         numFaceVertices[numFaceCases++] = nFV;
565       }
566     }
567   }
568   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
569   /* Count neighboring cells */
570   for (cell = cStart; cell < cEnd; ++cell) {
571     PetscInt numNeighbors = PETSC_DETERMINE, n;
572 
573     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
574     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
575     for (n = 0; n < numNeighbors; ++n) {
576       PetscInt        cellPair[2];
577       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
578       PetscInt        meetSize = 0;
579       const PetscInt *meet    = NULL;
580 
581       cellPair[0] = cell; cellPair[1] = neighborCells[n];
582       if (cellPair[0] == cellPair[1]) continue;
583       if (!found) {
584         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
585         if (meetSize) {
586           PetscInt f;
587 
588           for (f = 0; f < numFaceCases; ++f) {
589             if (numFaceVertices[f] == meetSize) {
590               found = PETSC_TRUE;
591               break;
592             }
593           }
594         }
595         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
596       }
597       if (found) ++off[cell-cStart+1];
598     }
599   }
600   /* Prefix sum */
601   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
602 
603   if (adjacency) {
604     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
605     /* Get neighboring cells */
606     for (cell = cStart; cell < cEnd; ++cell) {
607       PetscInt numNeighbors = PETSC_DETERMINE, n;
608       PetscInt cellOffset   = 0;
609 
610       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
611       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
612       for (n = 0; n < numNeighbors; ++n) {
613         PetscInt        cellPair[2];
614         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
615         PetscInt        meetSize = 0;
616         const PetscInt *meet    = NULL;
617 
618         cellPair[0] = cell; cellPair[1] = neighborCells[n];
619         if (cellPair[0] == cellPair[1]) continue;
620         if (!found) {
621           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
622           if (meetSize) {
623             PetscInt f;
624 
625             for (f = 0; f < numFaceCases; ++f) {
626               if (numFaceVertices[f] == meetSize) {
627                 found = PETSC_TRUE;
628                 break;
629               }
630             }
631           }
632           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
633         }
634         if (found) {
635           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
636           ++cellOffset;
637         }
638       }
639     }
640   }
641   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
642   if (numVertices) *numVertices = numCells;
643   if (offsets)   *offsets   = off;
644   if (adjacency) *adjacency = adj;
645   PetscFunctionReturn(0);
646 }
647 
648 /*@C
649   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
650 
651   Not Collective
652 
653   Input Parameters:
654 + name        - The name of a new user-defined creation routine
655 - create_func - The creation routine itself
656 
657   Notes:
658   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
659 
660   Sample usage:
661 .vb
662     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
663 .ve
664 
665   Then, your PetscPartitioner type can be chosen with the procedural interface via
666 .vb
667     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
668     PetscPartitionerSetType(PetscPartitioner, "my_part");
669 .ve
670    or at runtime via the option
671 .vb
672     -petscpartitioner_type my_part
673 .ve
674 
675   Level: advanced
676 
677 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
678 
679 @*/
680 PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
681 {
682   PetscErrorCode ierr;
683 
684   PetscFunctionBegin;
685   ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
686   PetscFunctionReturn(0);
687 }
688 
689 /*@C
690   PetscPartitionerSetType - Builds a particular PetscPartitioner
691 
692   Collective on PetscPartitioner
693 
694   Input Parameters:
695 + part - The PetscPartitioner object
696 - name - The kind of partitioner
697 
698   Options Database Key:
699 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
700 
701   Level: intermediate
702 
703 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
704 @*/
705 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
706 {
707   PetscErrorCode (*r)(PetscPartitioner);
708   PetscBool      match;
709   PetscErrorCode ierr;
710 
711   PetscFunctionBegin;
712   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
713   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
714   if (match) PetscFunctionReturn(0);
715 
716   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
717   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
718 
719   if (part->ops->destroy) {
720     ierr = (*part->ops->destroy)(part);CHKERRQ(ierr);
721   }
722   part->noGraph = PETSC_FALSE;
723   ierr = PetscMemzero(part->ops, sizeof(*part->ops));CHKERRQ(ierr);
724   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
725   ierr = (*r)(part);CHKERRQ(ierr);
726   PetscFunctionReturn(0);
727 }
728 
729 /*@C
730   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
731 
732   Not Collective
733 
734   Input Parameter:
735 . part - The PetscPartitioner
736 
737   Output Parameter:
738 . name - The PetscPartitioner type name
739 
740   Level: intermediate
741 
742 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
743 @*/
744 PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
745 {
746   PetscFunctionBegin;
747   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
748   PetscValidPointer(name, 2);
749   *name = ((PetscObject) part)->type_name;
750   PetscFunctionReturn(0);
751 }
752 
753 /*@C
754    PetscPartitionerViewFromOptions - View from Options
755 
756    Collective on PetscPartitioner
757 
758    Input Parameters:
759 +  A - the PetscPartitioner object
760 .  obj - Optional object
761 -  name - command line option
762 
763    Level: intermediate
764 .seealso:  PetscPartitionerView(), PetscObjectViewFromOptions()
765 @*/
766 PetscErrorCode PetscPartitionerViewFromOptions(PetscPartitioner A,PetscObject obj,const char name[])
767 {
768   PetscErrorCode ierr;
769 
770   PetscFunctionBegin;
771   PetscValidHeaderSpecific(A,PETSCPARTITIONER_CLASSID,1);
772   ierr = PetscObjectViewFromOptions((PetscObject)A,obj,name);CHKERRQ(ierr);
773   PetscFunctionReturn(0);
774 }
775 
776 /*@C
777   PetscPartitionerView - Views a PetscPartitioner
778 
779   Collective on PetscPartitioner
780 
781   Input Parameter:
782 + part - the PetscPartitioner object to view
783 - v    - the viewer
784 
785   Level: developer
786 
787 .seealso: PetscPartitionerDestroy()
788 @*/
789 PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
790 {
791   PetscMPIInt    size;
792   PetscBool      isascii;
793   PetscErrorCode ierr;
794 
795   PetscFunctionBegin;
796   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
797   if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
798   ierr = PetscObjectTypeCompare((PetscObject) v, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
799   if (isascii) {
800     ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
801     ierr = PetscViewerASCIIPrintf(v, "Graph Partitioner: %d MPI Process%s\n", size, size > 1 ? "es" : "");CHKERRQ(ierr);
802     ierr = PetscViewerASCIIPrintf(v, "  type: %s\n", part->hdr.type_name);CHKERRQ(ierr);
803     ierr = PetscViewerASCIIPrintf(v, "  edge cut: %D\n", part->edgeCut);CHKERRQ(ierr);
804     ierr = PetscViewerASCIIPrintf(v, "  balance: %.2g\n", part->balance);CHKERRQ(ierr);
805     ierr = PetscViewerASCIIPrintf(v, "  use vertex weights: %d\n", part->usevwgt);CHKERRQ(ierr);
806   }
807   if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
808   PetscFunctionReturn(0);
809 }
810 
811 static PetscErrorCode PetscPartitionerGetDefaultType(MPI_Comm comm, const char *currentType, const char **defaultType)
812 {
813   PetscMPIInt    size;
814   PetscErrorCode ierr;
815 
816   PetscFunctionBegin;
817   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
818   if (!currentType) {
819     if (size == 1) {
820       *defaultType = PETSCPARTITIONERSIMPLE;
821     } else {
822 #if defined(PETSC_HAVE_PARMETIS)
823       *defaultType = PETSCPARTITIONERPARMETIS;
824 #elif defined(PETSC_HAVE_PTSCOTCH)
825       *defaultType = PETSCPARTITIONERPTSCOTCH;
826 #elif defined(PETSC_HAVE_CHACO)
827       *defaultType = PETSCPARTITIONERCHACO;
828 #else
829       *defaultType = PETSCPARTITIONERSIMPLE;
830 #endif
831     }
832   } else {
833     *defaultType = currentType;
834   }
835   PetscFunctionReturn(0);
836 }
837 
/*@
  PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to set options for

  Options Database Keys:
+  -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
.  -petscpartitioner_use_vertex_weights - Uses weights associated with the graph vertices
-  -petscpartitioner_view_graph - View the graph each time PetscPartitionerPartition is called. Viewer can be customized, see PetscOptionsGetViewer()

  Level: developer

.seealso: PetscPartitionerView(), PetscPartitionerSetType(), PetscPartitionerPartition()
@*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  const char    *defaultType;
  char           name[256];
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Choose the default shown for -petscpartitioner_type: the current type if set, otherwise a built-in default */
  ierr = PetscPartitionerGetDefaultType(((PetscObject) part)->comm, ((PetscObject) part)->type_name,&defaultType);CHKERRQ(ierr);
  /* NOTE: PetscObjectOptionsBegin introduces the PetscOptionsObject identifier used below and
     must be paired with PetscOptionsEnd() */
  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, sizeof(name), &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
  } else if (!((PetscObject) part)->type_name) {
    /* No type given on the command line and none set yet: install the default */
    ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
  }
  ierr = PetscOptionsBool("-petscpartitioner_use_vertex_weights","Use vertex weights","",part->usevwgt,&part->usevwgt,NULL);CHKERRQ(ierr);
  /* Give the concrete implementation a chance to process its own options */
  if (part->ops->setfromoptions) {
    ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);
  }
  /* Drop any viewers from a previous call before querying the options database again */
  ierr = PetscViewerDestroy(&part->viewer);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&part->viewerGraph);CHKERRQ(ierr);
  ierr = PetscOptionsGetViewer(((PetscObject) part)->comm, ((PetscObject) part)->options, ((PetscObject) part)->prefix, "-petscpartitioner_view", &part->viewer, NULL, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsGetViewer(((PetscObject) part)->comm, ((PetscObject) part)->options, ((PetscObject) part)->prefix, "-petscpartitioner_view_graph", &part->viewerGraph, NULL, &part->viewGraph);CHKERRQ(ierr);
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
885 
886 /*@C
887   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
888 
889   Collective on PetscPartitioner
890 
891   Input Parameter:
892 . part - the PetscPartitioner object to setup
893 
894   Level: developer
895 
896 .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
897 @*/
898 PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
899 {
900   PetscErrorCode ierr;
901 
902   PetscFunctionBegin;
903   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
904   if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
905   PetscFunctionReturn(0);
906 }
907 
908 /*@
909   PetscPartitionerDestroy - Destroys a PetscPartitioner object
910 
911   Collective on PetscPartitioner
912 
913   Input Parameter:
914 . part - the PetscPartitioner object to destroy
915 
916   Level: developer
917 
918 .seealso: PetscPartitionerView()
919 @*/
920 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
921 {
922   PetscErrorCode ierr;
923 
924   PetscFunctionBegin;
925   if (!*part) PetscFunctionReturn(0);
926   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
927 
928   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
929   ((PetscObject) (*part))->refct = 0;
930 
931   ierr = PetscViewerDestroy(&(*part)->viewer);CHKERRQ(ierr);
932   ierr = PetscViewerDestroy(&(*part)->viewerGraph);CHKERRQ(ierr);
933   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
934   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
935   PetscFunctionReturn(0);
936 }
937 
/*@
  PetscPartitionerPartition - Partition a graph

  Collective on PetscPartitioner

  Input Parameters:
+ part    - The PetscPartitioner
. nparts  - Number of partitions
. numVertices - Number of vertices in the local part of the graph
. start - row pointers for the local part of the graph (CSR style)
. adjacency - adjacency list (CSR style)
. vertexSection - PetscSection describing the absolute weight of each local vertex (can be NULL)
- targetSection - PetscSection describing the absolute weight of each partition (can be NULL)

  Output Parameters:
+ partSection     - The PetscSection giving the division of points by partition
- partition       - The list of points by partition

  Options Database:
+ -petscpartitioner_view - View the partitioner information
- -petscpartitioner_view_graph - View the graph we are partitioning

  Notes:
    The chart of the vertexSection (if present) must contain [0,numVertices), with the number of dofs in the section specifying the absolute weight for each vertex.
    The chart of the targetSection (if present) must contain [0,nparts), with the number of dofs in the section specifying the absolute weight for each partition. This information must be the same across processes, PETSc does not check it.

  Level: developer

.seealso PetscPartitionerCreate(), PetscSectionCreate(), PetscSectionSetChart(), PetscSectionSetDof()
@*/
PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertexSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* --- Argument validation --------------------------------------------------- */
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidLogicalCollectiveInt(part, nparts, 2);
  if (nparts <= 0) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Number of parts must be positive");
  if (numVertices < 0) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of vertices must be non-negative");
  if (numVertices && !part->noGraph) {
    /* CSR row pointers have numVertices+1 entries; check both ends of the array */
    PetscValidIntPointer(start, 4);
    PetscValidIntPointer(start + numVertices, 4);
    /* adjacency is only required when there is at least one edge */
    if (start[numVertices]) PetscValidIntPointer(adjacency, 5);
  }
  if (vertexSection) {
    PetscInt s,e;

    /* The vertex-weight section must cover every local vertex */
    PetscValidHeaderSpecific(vertexSection, PETSC_SECTION_CLASSID, 6);
    ierr = PetscSectionGetChart(vertexSection, &s, &e);CHKERRQ(ierr);
    if (s > 0 || e < numVertices) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid vertexSection chart [%D,%D)",s,e);
  }
  if (targetSection) {
    PetscInt s,e;

    /* The partition-weight section must cover every partition */
    PetscValidHeaderSpecific(targetSection, PETSC_SECTION_CLASSID, 7);
    ierr = PetscSectionGetChart(targetSection, &s, &e);CHKERRQ(ierr);
    if (s > 0 || e < nparts) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid targetSection chart [%D,%D)",s,e);
  }
  PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 8);
  PetscValidPointer(partition, 9);

  /* --- Partition ------------------------------------------------------------- */
  ierr = PetscSectionReset(partSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  if (nparts == 1) { /* quick */
    /* Trivial case: everything goes to partition 0 */
    ierr = PetscSectionSetDof(partSection, 0, numVertices);CHKERRQ(ierr);
    ierr = ISCreateStride(PetscObjectComm((PetscObject)part),numVertices,0,1,partition);CHKERRQ(ierr);
  } else {
    if (!part->ops->partition) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "PetscPartitioner %s has no partitioning method", ((PetscObject)part)->type_name);
    ierr = (*part->ops->partition)(part, nparts, numVertices, start, adjacency, vertexSection, targetSection, partSection, partition);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* --- Optional viewing ------------------------------------------------------ */
  if (part->viewerGraph) {
    /* Dump the CSR graph, one rank at a time, through the synchronized ASCII viewer */
    PetscViewer viewer = part->viewerGraph;
    PetscBool   isascii;
    PetscInt    v, i;
    PetscMPIInt rank;

    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) viewer), &rank);CHKERRQ(ierr);
    ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
    if (isascii) {
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]Nv: %D\n", rank, numVertices);CHKERRQ(ierr);
      for (v = 0; v < numVertices; ++v) {
        const PetscInt s = start[v];
        const PetscInt e = start[v+1];

        ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]  ", rank);CHKERRQ(ierr);
        for (i = s; i < e; ++i) {ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%D ", adjacency[i]);CHKERRQ(ierr);}
        ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%D-%D)\n", s, e);CHKERRQ(ierr);
      }
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    }
  }
  if (part->viewer) {
    ierr = PetscPartitionerView(part,part->viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1037 
1038 /*@
1039   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
1040 
1041   Collective
1042 
1043   Input Parameter:
1044 . comm - The communicator for the PetscPartitioner object
1045 
1046   Output Parameter:
1047 . part - The PetscPartitioner object
1048 
1049   Level: beginner
1050 
1051 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
1052 @*/
1053 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
1054 {
1055   PetscPartitioner p;
1056   const char       *partitionerType = NULL;
1057   PetscErrorCode   ierr;
1058 
1059   PetscFunctionBegin;
1060   PetscValidPointer(part, 2);
1061   *part = NULL;
1062   ierr = DMInitializePackage();CHKERRQ(ierr);
1063 
1064   ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
1065   ierr = PetscPartitionerGetDefaultType(comm, NULL, &partitionerType);CHKERRQ(ierr);
1066   ierr = PetscPartitionerSetType(p,partitionerType);CHKERRQ(ierr);
1067 
1068   p->edgeCut = 0;
1069   p->balance = 0.0;
1070   p->usevwgt = PETSC_TRUE;
1071 
1072   *part = p;
1073   PetscFunctionReturn(0);
1074 }
1075 
1076 /*@
1077   PetscPartitionerDMPlexPartition - Create a non-overlapping partition of the cells in the mesh
1078 
1079   Collective on PetscPartitioner
1080 
1081   Input Parameters:
1082 + part    - The PetscPartitioner
1083 . targetSection - The PetscSection describing the absolute weight of each partition (can be NULL)
1084 - dm      - The mesh DM
1085 
1086   Output Parameters:
1087 + partSection     - The PetscSection giving the division of points by partition
1088 - partition       - The list of points by partition
1089 
1090   Notes:
1091     If the DM has a local section associated, each point to be partitioned will be weighted by the total number of dofs identified
1092     by the section in the transitive closure of the point.
1093 
1094   Level: developer
1095 
1096 .seealso DMPlexDistribute(), PetscPartitionerCreate(), PetscSectionCreate(), PetscSectionSetChart(), PetscPartitionerPartition()
1097 @*/
1098 PetscErrorCode PetscPartitionerDMPlexPartition(PetscPartitioner part, DM dm, PetscSection targetSection, PetscSection partSection, IS *partition)
1099 {
1100   PetscMPIInt    size;
1101   PetscBool      isplex;
1102   PetscErrorCode ierr;
1103   PetscSection   vertSection = NULL;
1104 
1105   PetscFunctionBegin;
1106   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1107   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
1108   if (targetSection) PetscValidHeaderSpecific(targetSection, PETSC_SECTION_CLASSID, 3);
1109   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
1110   PetscValidPointer(partition, 5);
1111   ierr = PetscObjectTypeCompare((PetscObject)dm,DMPLEX,&isplex);CHKERRQ(ierr);
1112   if (!isplex) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"Not for type %s",((PetscObject)dm)->type_name);
1113   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
1114   if (size == 1) {
1115     PetscInt *points;
1116     PetscInt  cStart, cEnd, c;
1117 
1118     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
1119     ierr = PetscSectionReset(partSection);CHKERRQ(ierr);
1120     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
1121     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
1122     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1123     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
1124     for (c = cStart; c < cEnd; ++c) points[c] = c;
1125     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1126     PetscFunctionReturn(0);
1127   }
1128   if (part->height == 0) {
1129     PetscInt numVertices = 0;
1130     PetscInt *start     = NULL;
1131     PetscInt *adjacency = NULL;
1132     IS       globalNumbering;
1133 
1134     if (!part->noGraph || part->viewGraph) {
1135       ierr = DMPlexCreatePartitionerGraph(dm, part->height, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
1136     } else { /* only compute the number of owned local vertices */
1137       const PetscInt *idxs;
1138       PetscInt       p, pStart, pEnd;
1139 
1140       ierr = DMPlexGetHeightStratum(dm, part->height, &pStart, &pEnd);CHKERRQ(ierr);
1141       ierr = DMPlexCreateNumbering_Plex(dm, pStart, pEnd, 0, NULL, dm->sf, &globalNumbering);CHKERRQ(ierr);
1142       ierr = ISGetIndices(globalNumbering, &idxs);CHKERRQ(ierr);
1143       for (p = 0; p < pEnd - pStart; p++) numVertices += idxs[p] < 0 ? 0 : 1;
1144       ierr = ISRestoreIndices(globalNumbering, &idxs);CHKERRQ(ierr);
1145     }
1146     if (part->usevwgt) {
1147       PetscSection   section = dm->localSection, clSection = NULL;
1148       IS             clPoints = NULL;
1149       const PetscInt *gid,*clIdx;
1150       PetscInt       v, p, pStart, pEnd;
1151 
1152       /* dm->localSection encodes degrees of freedom per point, not per cell. We need to get the closure index to properly specify cell weights (aka dofs) */
1153       /* We do this only if the local section has been set */
1154       if (section) {
1155         ierr = PetscSectionGetClosureIndex(section, (PetscObject)dm, &clSection, NULL);CHKERRQ(ierr);
1156         if (!clSection) {
1157           ierr = DMPlexCreateClosureIndex(dm,NULL);CHKERRQ(ierr);
1158         }
1159         ierr = PetscSectionGetClosureIndex(section, (PetscObject)dm, &clSection, &clPoints);CHKERRQ(ierr);
1160         ierr = ISGetIndices(clPoints,&clIdx);CHKERRQ(ierr);
1161       }
1162       ierr = DMPlexGetHeightStratum(dm, part->height, &pStart, &pEnd);CHKERRQ(ierr);
1163       ierr = PetscSectionCreate(PETSC_COMM_SELF, &vertSection);CHKERRQ(ierr);
1164       ierr = PetscSectionSetChart(vertSection, 0, numVertices);CHKERRQ(ierr);
1165       if (globalNumbering) {
1166         ierr = ISGetIndices(globalNumbering,&gid);CHKERRQ(ierr);
1167       } else gid = NULL;
1168       for (p = pStart, v = 0; p < pEnd; ++p) {
1169         PetscInt dof = 1;
1170 
1171         /* skip cells in the overlap */
1172         if (gid && gid[p-pStart] < 0) continue;
1173 
1174         if (section) {
1175           PetscInt cl, clSize, clOff;
1176 
1177           dof  = 0;
1178           ierr = PetscSectionGetDof(clSection, p, &clSize);CHKERRQ(ierr);
1179           ierr = PetscSectionGetOffset(clSection, p, &clOff);CHKERRQ(ierr);
1180           for (cl = 0; cl < clSize; cl+=2) {
1181             PetscInt clDof, clPoint = clIdx[clOff + cl]; /* odd indices are reserved for orientations */
1182 
1183             ierr = PetscSectionGetDof(section, clPoint, &clDof);CHKERRQ(ierr);
1184             dof += clDof;
1185           }
1186         }
1187         if (!dof) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of dofs for point %D in the local section should be positive",p);
1188         ierr = PetscSectionSetDof(vertSection, v, dof);CHKERRQ(ierr);
1189         v++;
1190       }
1191       if (globalNumbering) {
1192         ierr = ISRestoreIndices(globalNumbering,&gid);CHKERRQ(ierr);
1193       }
1194       if (clPoints) {
1195         ierr = ISRestoreIndices(clPoints,&clIdx);CHKERRQ(ierr);
1196       }
1197       ierr = PetscSectionSetUp(vertSection);CHKERRQ(ierr);
1198     }
1199     ierr = PetscPartitionerPartition(part, size, numVertices, start, adjacency, vertSection, targetSection, partSection, partition);CHKERRQ(ierr);
1200     ierr = PetscFree(start);CHKERRQ(ierr);
1201     ierr = PetscFree(adjacency);CHKERRQ(ierr);
1202     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
1203       const PetscInt *globalNum;
1204       const PetscInt *partIdx;
1205       PetscInt       *map, cStart, cEnd;
1206       PetscInt       *adjusted, i, localSize, offset;
1207       IS             newPartition;
1208 
1209       ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
1210       ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
1211       ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
1212       ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
1213       ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
1214       ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
1215       for (i = cStart, offset = 0; i < cEnd; i++) {
1216         if (globalNum[i - cStart] >= 0) map[offset++] = i;
1217       }
1218       for (i = 0; i < localSize; i++) {
1219         adjusted[i] = map[partIdx[i]];
1220       }
1221       ierr = PetscFree(map);CHKERRQ(ierr);
1222       ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
1223       ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
1224       ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
1225       ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
1226       ierr = ISDestroy(partition);CHKERRQ(ierr);
1227       *partition = newPartition;
1228     }
1229   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
1230   ierr = PetscSectionDestroy(&vertSection);CHKERRQ(ierr);
1231   PetscFunctionReturn(0);
1232 }
1233 
1234 static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
1235 {
1236   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1237   PetscErrorCode          ierr;
1238 
1239   PetscFunctionBegin;
1240   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
1241   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
1242   ierr = PetscFree(p);CHKERRQ(ierr);
1243   PetscFunctionReturn(0);
1244 }
1245 
1246 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
1247 {
1248   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1249   PetscErrorCode          ierr;
1250 
1251   PetscFunctionBegin;
1252   if (p->random) {
1253     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1254     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
1255     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1256   }
1257   PetscFunctionReturn(0);
1258 }
1259 
1260 static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
1261 {
1262   PetscBool      iascii;
1263   PetscErrorCode ierr;
1264 
1265   PetscFunctionBegin;
1266   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1267   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1268   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1269   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
1270   PetscFunctionReturn(0);
1271 }
1272 
1273 static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1274 {
1275   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1276   PetscErrorCode          ierr;
1277 
1278   PetscFunctionBegin;
1279   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner Shell Options");CHKERRQ(ierr);
1280   ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
1281   ierr = PetscOptionsTail();CHKERRQ(ierr);
1282   PetscFunctionReturn(0);
1283 }
1284 
1285 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
1286 {
1287   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1288   PetscInt                np;
1289   PetscErrorCode          ierr;
1290 
1291   PetscFunctionBegin;
1292   if (p->random) {
1293     PetscRandom r;
1294     PetscInt   *sizes, *points, v, p;
1295     PetscMPIInt rank;
1296 
1297     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) part), &rank);CHKERRQ(ierr);
1298     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
1299     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr);
1300     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
1301     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
1302     for (v = 0; v < numVertices; ++v) {points[v] = v;}
1303     for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);}
1304     for (v = numVertices-1; v > 0; --v) {
1305       PetscReal val;
1306       PetscInt  w, tmp;
1307 
1308       ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));CHKERRQ(ierr);
1309       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
1310       w    = PetscFloorReal(val);
1311       tmp       = points[v];
1312       points[v] = points[w];
1313       points[w] = tmp;
1314     }
1315     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
1316     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
1317     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
1318   }
1319   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
1320   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
1321   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
1322   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
1323   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
1324   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
1325   *partition = p->partition;
1326   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
1327   PetscFunctionReturn(0);
1328 }
1329 
1330 static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
1331 {
1332   PetscFunctionBegin;
1333   part->noGraph             = PETSC_TRUE; /* PetscPartitionerShell cannot overload the partition call, so it is safe for now */
1334   part->ops->view           = PetscPartitionerView_Shell;
1335   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
1336   part->ops->destroy        = PetscPartitionerDestroy_Shell;
1337   part->ops->partition      = PetscPartitionerPartition_Shell;
1338   PetscFunctionReturn(0);
1339 }
1340 
1341 /*MC
1342   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
1343 
1344   Level: intermediate
1345 
1346   Options Database Keys:
1347 .  -petscpartitioner_shell_random - Use a random partition
1348 
1349 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1350 M*/
1351 
1352 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
1353 {
1354   PetscPartitioner_Shell *p;
1355   PetscErrorCode          ierr;
1356 
1357   PetscFunctionBegin;
1358   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1359   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1360   part->data = p;
1361 
1362   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
1363   p->random = PETSC_FALSE;
1364   PetscFunctionReturn(0);
1365 }
1366 
1367 /*@C
1368   PetscPartitionerShellSetPartition - Set an artifical partition for a mesh
1369 
1370   Collective on PetscPartitioner
1371 
1372   Input Parameters:
1373 + part   - The PetscPartitioner
1374 . size   - The number of partitions
1375 . sizes  - array of length size (or NULL) providing the number of points in each partition
1376 - points - array of length sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)
1377 
1378   Level: developer
1379 
1380   Notes:
1381     It is safe to free the sizes and points arrays after use in this routine.
1382 
1383 .seealso DMPlexDistribute(), PetscPartitionerCreate()
1384 @*/
1385 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
1386 {
1387   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1388   PetscInt                proc, numPoints;
1389   PetscErrorCode          ierr;
1390 
1391   PetscFunctionBegin;
1392   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1393   if (sizes)  {PetscValidPointer(sizes, 3);}
1394   if (points) {PetscValidPointer(points, 4);}
1395   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
1396   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
1397   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
1398   ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
1399   if (sizes) {
1400     for (proc = 0; proc < size; ++proc) {
1401       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
1402     }
1403   }
1404   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
1405   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
1406   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
1407   PetscFunctionReturn(0);
1408 }
1409 
1410 /*@
1411   PetscPartitionerShellSetRandom - Set the flag to use a random partition
1412 
1413   Collective on PetscPartitioner
1414 
1415   Input Parameters:
1416 + part   - The PetscPartitioner
1417 - random - The flag to use a random partition
1418 
1419   Level: intermediate
1420 
1421 .seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
1422 @*/
1423 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
1424 {
1425   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1426 
1427   PetscFunctionBegin;
1428   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1429   p->random = random;
1430   PetscFunctionReturn(0);
1431 }
1432 
1433 /*@
1434   PetscPartitionerShellGetRandom - get the flag to use a random partition
1435 
1436   Collective on PetscPartitioner
1437 
1438   Input Parameter:
1439 . part   - The PetscPartitioner
1440 
1441   Output Parameter:
1442 . random - The flag to use a random partition
1443 
1444   Level: intermediate
1445 
1446 .seealso PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
1447 @*/
1448 PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
1449 {
1450   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1451 
1452   PetscFunctionBegin;
1453   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1454   PetscValidPointer(random, 2);
1455   *random = p->random;
1456   PetscFunctionReturn(0);
1457 }
1458 
1459 static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
1460 {
1461   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
1462   PetscErrorCode          ierr;
1463 
1464   PetscFunctionBegin;
1465   ierr = PetscFree(p);CHKERRQ(ierr);
1466   PetscFunctionReturn(0);
1467 }
1468 
/* The simple partitioner has no type-specific state, so there is nothing to print */
static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1474 
1475 static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
1476 {
1477   PetscBool      iascii;
1478   PetscErrorCode ierr;
1479 
1480   PetscFunctionBegin;
1481   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1482   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1483   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1484   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
1485   PetscFunctionReturn(0);
1486 }
1487 
/*
  Trivial partition: assign contiguous runs of (globally numbered) vertices to the
  partitions, ignoring graph connectivity entirely. If targetSection is given, run
  lengths are scaled to the requested per-partition sizes; otherwise vertices are
  split as evenly as possible. Vertex weights (vertSection) are ignored.
*/
static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np, *tpwgts = NULL, sumw = 0, numVerticesGlobal  = 0;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (vertSection) { ierr = PetscInfo(part,"PETSCPARTITIONERSIMPLE ignores vertex weights\n");CHKERRQ(ierr); }
  comm = PetscObjectComm((PetscObject)part);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (targetSection) {
    /* Convert target weights into integer vertex counts summing exactly to numVerticesGlobal */
    ierr = MPIU_Allreduce(&numVertices, &numVerticesGlobal, 1, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
    ierr = PetscCalloc1(nparts,&tpwgts);CHKERRQ(ierr);
    for (np = 0; np < nparts; ++np) {
      ierr = PetscSectionGetDof(targetSection,np,&tpwgts[np]);CHKERRQ(ierr);
      sumw += tpwgts[np];
    }
    if (!sumw) {
      /* All target weights are zero: fall back to the unweighted even split below */
      ierr = PetscFree(tpwgts);CHKERRQ(ierr);
    } else {
      PetscInt m,mp;
      /* Scale weights to vertex counts; integer truncation may drop a few vertices */
      for (np = 0; np < nparts; ++np) tpwgts[np] = (tpwgts[np]*numVerticesGlobal)/sumw;
      for (np = 0, m = -1, mp = 0, sumw = 0; np < nparts; ++np) {
        if (m < tpwgts[np]) { m = tpwgts[np]; mp = np; }
        sumw += tpwgts[np];
      }
      /* Assign the truncation remainder to the largest partition */
      if (sumw != numVerticesGlobal) tpwgts[mp] += numVerticesGlobal - sumw;
    }
  }

  /* Local points keep their local order; partSection records how many go to each rank */
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    if (tpwgts) {
      for (np = 0; np < nparts; ++np) {
        ierr = PetscSectionSetDof(partSection, np, tpwgts[np]);CHKERRQ(ierr);
      }
    } else {
      /* Even split: the first (numVertices % nparts) partitions get one extra vertex */
      for (np = 0; np < nparts; ++np) {
        ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);
      }
    }
  } else {
    if (tpwgts) {
      Vec         v;
      PetscScalar *array;
      PetscInt    st,j;
      PetscMPIInt rank;

      /* Use a distributed Vec to communicate, for each locally owned global vertex,
         the partition it belongs to under the target sizes */
      ierr = VecCreate(comm,&v);CHKERRQ(ierr);
      ierr = VecSetSizes(v,numVertices,numVerticesGlobal);CHKERRQ(ierr);
      ierr = VecSetType(v,VECSTANDARD);CHKERRQ(ierr);
      ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
      for (np = 0,st = 0; np < nparts; ++np) {
        /* Rank np fills partition np; the last rank also covers partitions beyond size */
        if (rank == np || (rank == size-1 && size < nparts && np >= size)) {
          for (j = 0; j < tpwgts[np]; j++) {
            ierr = VecSetValue(v,st+j,np,INSERT_VALUES);CHKERRQ(ierr);
          }
        }
        st += tpwgts[np];
      }
      ierr = VecAssemblyBegin(v);CHKERRQ(ierr);
      ierr = VecAssemblyEnd(v);CHKERRQ(ierr);
      ierr = VecGetArray(v,&array);CHKERRQ(ierr);
      for (j = 0; j < numVertices; ++j) {
        ierr = PetscSectionAddDof(partSection,PetscRealPart(array[j]),1);CHKERRQ(ierr);
      }
      ierr = VecRestoreArray(v,&array);CHKERRQ(ierr);
      ierr = VecDestroy(&v);CHKERRQ(ierr);
    } else {
      PetscMPIInt rank;
      PetscInt nvGlobal, *offsets, myFirst, myLast;

      /* Compute the global index range [myFirst, myLast] owned by this rank */
      ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
      offsets[0] = 0;
      ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
      for (np = 2; np <= size; np++) {
        offsets[np] += offsets[np-1];
      }
      nvGlobal = offsets[size];
      ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
      myFirst = offsets[rank];
      myLast  = offsets[rank + 1] - 1;
      ierr = PetscFree(offsets);CHKERRQ(ierr);
      if (numVertices) {
        PetscInt firstPart = 0, firstLargePart = 0;
        PetscInt lastPart = 0, lastLargePart = 0;
        PetscInt rem = nvGlobal % nparts;
        /* "Large" partitions (the first rem of them) hold pBig vertices, the rest pSmall */
        PetscInt pSmall = nvGlobal/nparts;
        PetscInt pBig = nvGlobal/nparts + 1;

        /* Determine which partitions overlap this rank's [myFirst, myLast] range */
        if (rem) {
          firstLargePart = myFirst / pBig;
          lastLargePart  = myLast  / pBig;

          if (firstLargePart < rem) {
            firstPart = firstLargePart;
          } else {
            firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
          }
          if (lastLargePart < rem) {
            lastPart = lastLargePart;
          } else {
            lastPart = rem + (myLast - (rem * pBig)) / pSmall;
          }
        } else {
          firstPart = myFirst / (nvGlobal/nparts);
          lastPart  = myLast  / (nvGlobal/nparts);
        }

        /* Record the intersection of each overlapping partition with the local range */
        for (np = firstPart; np <= lastPart; np++) {
          PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
          PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

          PartStart = PetscMax(PartStart,myFirst);
          PartEnd   = PetscMin(PartEnd,myLast+1);
          ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
        }
      }
    }
  }
  ierr = PetscFree(tpwgts);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1612 
1613 static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1614 {
1615   PetscFunctionBegin;
1616   part->noGraph        = PETSC_TRUE;
1617   part->ops->view      = PetscPartitionerView_Simple;
1618   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1619   part->ops->partition = PetscPartitionerPartition_Simple;
1620   PetscFunctionReturn(0);
1621 }
1622 
1623 /*MC
1624   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1625 
1626   Level: intermediate
1627 
1628 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1629 M*/
1630 
1631 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1632 {
1633   PetscPartitioner_Simple *p;
1634   PetscErrorCode           ierr;
1635 
1636   PetscFunctionBegin;
1637   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1638   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1639   part->data = p;
1640 
1641   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1642   PetscFunctionReturn(0);
1643 }
1644 
1645 static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1646 {
1647   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1648   PetscErrorCode          ierr;
1649 
1650   PetscFunctionBegin;
1651   ierr = PetscFree(p);CHKERRQ(ierr);
1652   PetscFunctionReturn(0);
1653 }
1654 
/* The gather partitioner has no type-specific state, so there is nothing to print */
static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1660 
1661 static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1662 {
1663   PetscBool      iascii;
1664   PetscErrorCode ierr;
1665 
1666   PetscFunctionBegin;
1667   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1668   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1669   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1670   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1671   PetscFunctionReturn(0);
1672 }
1673 
1674 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
1675 {
1676   PetscInt       np;
1677   PetscErrorCode ierr;
1678 
1679   PetscFunctionBegin;
1680   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1681   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1682   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1683   PetscFunctionReturn(0);
1684 }
1685 
1686 static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1687 {
1688   PetscFunctionBegin;
1689   part->noGraph        = PETSC_TRUE;
1690   part->ops->view      = PetscPartitionerView_Gather;
1691   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1692   part->ops->partition = PetscPartitionerPartition_Gather;
1693   PetscFunctionReturn(0);
1694 }
1695 
1696 /*MC
1697   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1698 
1699   Level: intermediate
1700 
1701 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1702 M*/
1703 
1704 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1705 {
1706   PetscPartitioner_Gather *p;
1707   PetscErrorCode           ierr;
1708 
1709   PetscFunctionBegin;
1710   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1711   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1712   part->data = p;
1713 
1714   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1715   PetscFunctionReturn(0);
1716 }
1717 
1718 
1719 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1720 {
1721   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1722   PetscErrorCode          ierr;
1723 
1724   PetscFunctionBegin;
1725   ierr = PetscFree(p);CHKERRQ(ierr);
1726   PetscFunctionReturn(0);
1727 }
1728 
/* The Chaco partitioner has no type-specific state, so there is nothing to print */
static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1734 
1735 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1736 {
1737   PetscBool      iascii;
1738   PetscErrorCode ierr;
1739 
1740   PetscFunctionBegin;
1741   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1742   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1743   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1744   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1745   PetscFunctionReturn(0);
1746 }
1747 
1748 #if defined(PETSC_HAVE_CHACO)
1749 #if defined(PETSC_HAVE_UNISTD_H)
1750 #include <unistd.h>
1751 #endif
1752 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1753 #include <chaco.h>
1754 #else
1755 /* Older versions of Chaco do not have an include file */
1756 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1757                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1758                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1759                        int mesh_dims[3], double *goal, int global_method, int local_method,
1760                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1761 #endif
1762 extern int FREE_GRAPH;
1763 #endif
1764 
/*
  Partition the graph with the (serial) Chaco library. The whole graph must reside on a
  single process; other processes return an empty IS. Chaco prints diagnostics to
  stdout, so stdout is temporarily redirected to a pipe and replayed only on error.
*/
static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int           *assignment;              /* Output partition */
#else
  short int     *assignment;              /* Output partition */
#endif
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)part,&comm);CHKERRQ(ierr);
  if (PetscDefined (USE_DEBUG)) {
    /* In debug mode, verify that at most one process holds graph vertices */
    int ival,isum;
    PetscBool distributed;

    ival = (numVertices > 0);
    ierr = MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);CHKERRQ(ierr);
    distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
    if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
  }
  if (!numVertices) { /* distributed case, return if not holding the graph */
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco expects 1-based vertex numbers; shift adjacency in place (undone below) */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  if (part->usevwgt) { ierr = PetscInfo(part,"PETSCPARTITIONERCHACO ignores vertex weights\n");CHKERRQ(ierr); }
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    /* Restore stdout and capture Chaco's output so it can be shown on failure */
    char msgLog[10000];
    int  count;

    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#else
  if (ierr) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
#endif
  /* Convert to PetscSection+IS */
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  /* Group vertex numbers by partition: partition 0's points first, then partition 1, ... */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift applied above, restoring the caller's adjacency */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1880 
1881 static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1882 {
1883   PetscFunctionBegin;
1884   part->noGraph        = PETSC_FALSE;
1885   part->ops->view      = PetscPartitionerView_Chaco;
1886   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1887   part->ops->partition = PetscPartitionerPartition_Chaco;
1888   PetscFunctionReturn(0);
1889 }
1890 
1891 /*MC
1892   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1893 
1894   Level: intermediate
1895 
1896 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1897 M*/
1898 
1899 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1900 {
1901   PetscPartitioner_Chaco *p;
1902   PetscErrorCode          ierr;
1903 
1904   PetscFunctionBegin;
1905   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1906   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1907   part->data = p;
1908 
1909   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1910   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1911   PetscFunctionReturn(0);
1912 }
1913 
/* Values for -petscpartitioner_parmetis_type: index 0 = multilevel k-way, 1 = recursive bisection */
static const char *ptypes[] = {"kway", "rb"};
1915 
1916 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1917 {
1918   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1919   PetscErrorCode             ierr;
1920 
1921   PetscFunctionBegin;
1922   ierr = MPI_Comm_free(&p->pcomm);CHKERRQ(ierr);
1923   ierr = PetscFree(p);CHKERRQ(ierr);
1924   PetscFunctionReturn(0);
1925 }
1926 
/* Print the ParMetis partitioner's configurable state (type, imbalance, debug, seed) */
static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
  PetscErrorCode             ierr;

  PetscFunctionBegin;
  ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "ParMetis type: %s\n", ptypes[p->ptype]);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "load imbalance ratio %g\n", (double) p->imbalanceRatio);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "debug flag %D\n", p->debugFlag);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "random seed %D\n", p->randomSeed);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1941 
1942 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1943 {
1944   PetscBool      iascii;
1945   PetscErrorCode ierr;
1946 
1947   PetscFunctionBegin;
1948   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1949   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1950   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1951   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1952   PetscFunctionReturn(0);
1953 }
1954 
/* Process the command-line options controlling the ParMetis partitioner */
static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
  PetscErrorCode            ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");CHKERRQ(ierr);
  /* ptype indexes into the ptypes[] table: 0 = kway, 1 = rb */
  ierr = PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-petscpartitioner_parmetis_imbalance_ratio", "Load imbalance ratio limit", "", p->imbalanceRatio, &p->imbalanceRatio, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-petscpartitioner_parmetis_debug", "Debugging flag", "", p->debugFlag, &p->debugFlag, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-petscpartitioner_parmetis_seed", "Random seed", "", p->randomSeed, &p->randomSeed, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1969 
1970 #if defined(PETSC_HAVE_PARMETIS)
1971 #include <parmetis.h>
1972 #endif
1973 
1974 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
1975 {
1976 #if defined(PETSC_HAVE_PARMETIS)
1977   PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *) part->data;
1978   MPI_Comm       comm;
1979   PetscInt       nvtxs       = numVertices; /* The number of vertices in full graph */
1980   PetscInt      *vtxdist;                   /* Distribution of vertices across processes */
1981   PetscInt      *xadj        = start;       /* Start of edge list for each vertex */
1982   PetscInt      *adjncy      = adjacency;   /* Edge lists for all vertices */
1983   PetscInt      *vwgt        = NULL;        /* Vertex weights */
1984   PetscInt      *adjwgt      = NULL;        /* Edge weights */
1985   PetscInt       wgtflag     = 0;           /* Indicates which weights are present */
1986   PetscInt       numflag     = 0;           /* Indicates initial offset (0 or 1) */
1987   PetscInt       ncon        = 1;           /* The number of weights per vertex */
1988   PetscInt       metis_ptype = pm->ptype;   /* kway or recursive bisection */
1989   real_t        *tpwgts;                    /* The fraction of vertex weights assigned to each partition */
1990   real_t        *ubvec;                     /* The balance intolerance for vertex weights */
1991   PetscInt       options[64];               /* Options */
1992   PetscInt       v, i, *assignment, *points;
1993   PetscMPIInt    p, size, rank;
1994   PetscBool      hasempty = PETSC_FALSE;
1995   PetscErrorCode ierr;
1996 
1997   PetscFunctionBegin;
1998   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1999   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2000   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2001   /* Calculate vertex distribution */
2002   ierr = PetscMalloc4(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment);CHKERRQ(ierr);
2003   vtxdist[0] = 0;
2004   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
2005   for (p = 2; p <= size; ++p) {
2006     hasempty = (PetscBool)(hasempty || !vtxdist[p-1] || !vtxdist[p]);
2007     vtxdist[p] += vtxdist[p-1];
2008   }
2009   /* null graph */
2010   if (vtxdist[size] == 0) {
2011     ierr = PetscFree4(vtxdist,tpwgts,ubvec,assignment);CHKERRQ(ierr);
2012     ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
2013     PetscFunctionReturn(0);
2014   }
2015   /* Calculate partition weights */
2016   if (targetSection) {
2017     PetscInt p;
2018     real_t   sumt = 0.0;
2019 
2020     for (p = 0; p < nparts; ++p) {
2021       PetscInt tpd;
2022 
2023       ierr = PetscSectionGetDof(targetSection,p,&tpd);CHKERRQ(ierr);
2024       sumt += tpd;
2025       tpwgts[p] = tpd;
2026     }
2027     if (sumt) { /* METIS/ParMETIS do not like exactly zero weight */
2028       for (p = 0, sumt = 0.0; p < nparts; ++p) {
2029         tpwgts[p] = PetscMax(tpwgts[p],PETSC_SMALL);
2030         sumt += tpwgts[p];
2031       }
2032       for (p = 0; p < nparts; ++p) tpwgts[p] /= sumt;
2033       for (p = 0, sumt = 0.0; p < nparts-1; ++p) sumt += tpwgts[p];
2034       tpwgts[nparts - 1] = 1. - sumt;
2035     }
2036   } else {
2037     for (p = 0; p < nparts; ++p) tpwgts[p] = 1.0/nparts;
2038   }
2039   ubvec[0] = pm->imbalanceRatio;
2040 
2041   /* Weight cells */
2042   if (vertSection) {
2043     ierr = PetscMalloc1(nvtxs,&vwgt);CHKERRQ(ierr);
2044     for (v = 0; v < nvtxs; ++v) {
2045       ierr = PetscSectionGetDof(vertSection, v, &vwgt[v]);CHKERRQ(ierr);
2046     }
2047     wgtflag |= 2; /* have weights on graph vertices */
2048   }
2049 
2050   for (p = 0; !vtxdist[p+1] && p < size; ++p);
2051   if (vtxdist[p+1] == vtxdist[size]) {
2052     if (rank == p) {
2053       ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
2054       options[METIS_OPTION_DBGLVL] = pm->debugFlag;
2055       options[METIS_OPTION_SEED]   = pm->randomSeed;
2056       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
2057       if (metis_ptype == 1) {
2058         PetscStackPush("METIS_PartGraphRecursive");
2059         ierr = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
2060         PetscStackPop;
2061         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
2062       } else {
2063         /*
2064          It would be nice to activate the two options below, but they would need some actual testing.
2065          - Turning on these options may exercise path of the METIS code that have bugs and may break production runs.
2066          - If CONTIG is set to 1, METIS will exit with error if the graph is disconnected, despite the manual saying the option is ignored in such case.
2067         */
2068         /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
2069         /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
2070         PetscStackPush("METIS_PartGraphKway");
2071         ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
2072         PetscStackPop;
2073         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
2074       }
2075     }
2076   } else {
2077     MPI_Comm pcomm;
2078 
2079     options[0] = 1; /*use options */
2080     options[1] = pm->debugFlag;
2081     options[2] = (pm->randomSeed == -1) ? 15 : pm->randomSeed; /* default is GLOBAL_SEED=15 from `libparmetis/defs.h` */
2082 
2083     if (hasempty) { /* parmetis does not support empty graphs on some of the processes */
2084       PetscInt cnt;
2085 
2086       ierr = MPI_Comm_split(pm->pcomm,!!nvtxs,rank,&pcomm);CHKERRQ(ierr);
2087       for (p=0,cnt=0;p<size;p++) {
2088         if (vtxdist[p+1] != vtxdist[p]) {
2089           vtxdist[cnt+1] = vtxdist[p+1];
2090           cnt++;
2091         }
2092       }
2093     } else pcomm = pm->pcomm;
2094     if (nvtxs) {
2095       PetscStackPush("ParMETIS_V3_PartKway");
2096       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &pcomm);
2097       PetscStackPop;
2098       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
2099     }
2100     if (hasempty) {
2101       ierr = MPI_Comm_free(&pcomm);CHKERRQ(ierr);
2102     }
2103   }
2104 
2105   /* Convert to PetscSection+IS */
2106   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
2107   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
2108   for (p = 0, i = 0; p < nparts; ++p) {
2109     for (v = 0; v < nvtxs; ++v) {
2110       if (assignment[v] == p) points[i++] = v;
2111     }
2112   }
2113   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
2114   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
2115   ierr = PetscFree4(vtxdist,tpwgts,ubvec,assignment);CHKERRQ(ierr);
2116   ierr = PetscFree(vwgt);CHKERRQ(ierr);
2117   PetscFunctionReturn(0);
2118 #else
2119   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
2120 #endif
2121 }
2122 
2123 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
2124 {
2125   PetscFunctionBegin;
2126   part->noGraph             = PETSC_FALSE;
2127   part->ops->view           = PetscPartitionerView_ParMetis;
2128   part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
2129   part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
2130   part->ops->partition      = PetscPartitionerPartition_ParMetis;
2131   PetscFunctionReturn(0);
2132 }
2133 
2134 /*MC
2135   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMETIS library
2136 
2137   Level: intermediate
2138 
2139   Options Database Keys:
2140 +  -petscpartitioner_parmetis_type <string> - ParMETIS partitioning type. Either "kway" or "rb" (recursive bisection)
2141 .  -petscpartitioner_parmetis_imbalance_ratio <value> - Load imbalance ratio limit
2142 .  -petscpartitioner_parmetis_debug <int> - Debugging flag passed to ParMETIS/METIS routines
2143 -  -petscpartitioner_parmetis_seed <int> - Random seed
2144 
2145   Notes: when the graph is on a single process, this partitioner actually calls METIS and not ParMETIS
2146 
2147 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
2148 M*/
2149 
2150 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
2151 {
2152   PetscPartitioner_ParMetis *p;
2153   PetscErrorCode          ierr;
2154 
2155   PetscFunctionBegin;
2156   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
2157   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
2158   part->data = p;
2159 
2160   ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)part),&p->pcomm);CHKERRQ(ierr);
2161   p->ptype          = 0;
2162   p->imbalanceRatio = 1.05;
2163   p->debugFlag      = 0;
2164   p->randomSeed     = -1; /* defaults to GLOBAL_SEED=15 from `libparmetis/defs.h` */
2165 
2166   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
2167   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
2168   PetscFunctionReturn(0);
2169 }
2170 
2171 #if defined(PETSC_HAVE_PTSCOTCH)
2172 
2173 EXTERN_C_BEGIN
2174 #include <ptscotch.h>
2175 EXTERN_C_END
2176 
2177 #define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0)
2178 
2179 static int PTScotch_Strategy(PetscInt strategy)
2180 {
2181   switch (strategy) {
2182   case  0: return SCOTCH_STRATDEFAULT;
2183   case  1: return SCOTCH_STRATQUALITY;
2184   case  2: return SCOTCH_STRATSPEED;
2185   case  3: return SCOTCH_STRATBALANCE;
2186   case  4: return SCOTCH_STRATSAFETY;
2187   case  5: return SCOTCH_STRATSCALABILITY;
2188   case  6: return SCOTCH_STRATRECURSIVE;
2189   case  7: return SCOTCH_STRATREMAP;
2190   default: return SCOTCH_STRATDEFAULT;
2191   }
2192 }
2193 
/*
  PTScotch_PartGraph_Seq - Partition a graph that lives entirely on one process using
  the sequential Scotch library.

  Input Parameters (graph in CSR form, Scotch conventions):
+ strategy  - SCOTCH_STRAT* flag selecting the partitioning strategy
. imbalance - allowed load imbalance ratio
. n         - number of vertices
. xadj      - adjacency offsets, length n+1
. adjncy    - concatenated adjacency lists
. vtxwgt    - optional vertex weights (may be NULL)
. adjwgt    - optional edge weights (may be NULL)
. nparts    - number of requested parts
- tpart     - optional target weight per part (may be NULL)

  Output Parameter:
. part - part[v] holds the part assigned to vertex v

  Note: Scotch library failures are converted to PETSC_ERR_LIB by CHKERRPTSCOTCH.
*/
static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
                                             SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num tpart[], SCOTCH_Num part[])
{
  SCOTCH_Graph   grafdat;
  SCOTCH_Strat   stradat;
  SCOTCH_Num     vertnbr = n;
  SCOTCH_Num     edgenbr = xadj[n];  /* total number of adjacency entries */
  SCOTCH_Num*    velotab = vtxwgt;
  SCOTCH_Num*    edlotab = adjwgt;
  SCOTCH_Num     flagval = strategy;
  double         kbalval = imbalance;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  {
    PetscBool flg = PETSC_TRUE;
    /* NOTE: the misspelling "petscpartititoner" matches the historical option name */
    ierr = PetscOptionsDeprecatedNoObject("-petscpartititoner_ptscotch_vertex_weight",NULL,"3.13","Use -petscpartitioner_use_vertex_weights");CHKERRQ(ierr);
    ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
    if (!flg) velotab = NULL;
  }
  ierr = SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
  if (tpart) {
    /* Target part weights given: map onto a weighted complete-graph architecture */
    SCOTCH_Arch archdat;
    ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
    ierr = SCOTCH_archCmpltw(&archdat, nparts, tpart);CHKERRPTSCOTCH(ierr);
    ierr = SCOTCH_graphMap(&grafdat, &archdat, &stradat, part);CHKERRPTSCOTCH(ierr);
    SCOTCH_archExit(&archdat);
  } else {
    /* No target weights: plain k-way partitioning */
    ierr = SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
  }
  SCOTCH_stratExit(&stradat);
  SCOTCH_graphExit(&grafdat);
  PetscFunctionReturn(0);
}
2231 
2232 static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
2233                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num tpart[], SCOTCH_Num part[], MPI_Comm comm)
2234 {
2235   PetscMPIInt     procglbnbr;
2236   PetscMPIInt     proclocnum;
2237   SCOTCH_Arch     archdat;
2238   SCOTCH_Dgraph   grafdat;
2239   SCOTCH_Dmapping mappdat;
2240   SCOTCH_Strat    stradat;
2241   SCOTCH_Num      vertlocnbr;
2242   SCOTCH_Num      edgelocnbr;
2243   SCOTCH_Num*     veloloctab = vtxwgt;
2244   SCOTCH_Num*     edloloctab = adjwgt;
2245   SCOTCH_Num      flagval = strategy;
2246   double          kbalval = imbalance;
2247   PetscErrorCode  ierr;
2248 
2249   PetscFunctionBegin;
2250   {
2251     PetscBool flg = PETSC_TRUE;
2252     ierr = PetscOptionsDeprecatedNoObject("-petscpartititoner_ptscotch_vertex_weight",NULL,"3.13","Use -petscpartitioner_use_vertex_weights");CHKERRQ(ierr);
2253     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
2254     if (!flg) veloloctab = NULL;
2255   }
2256   ierr = MPI_Comm_size(comm, &procglbnbr);CHKERRQ(ierr);
2257   ierr = MPI_Comm_rank(comm, &proclocnum);CHKERRQ(ierr);
2258   vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
2259   edgelocnbr = xadj[vertlocnbr];
2260 
2261   ierr = SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr);
2262   ierr = SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr);
2263   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
2264   ierr = SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRQ(ierr);
2265   ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
2266   if (tpart) { /* target partition weights */
2267     ierr = SCOTCH_archCmpltw(&archdat, nparts, tpart);CHKERRPTSCOTCH(ierr);
2268   } else {
2269     ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
2270   }
2271   ierr = SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr);
2272 
2273   ierr = SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr);
2274   SCOTCH_dgraphMapExit(&grafdat, &mappdat);
2275   SCOTCH_archExit(&archdat);
2276   SCOTCH_stratExit(&stradat);
2277   SCOTCH_dgraphExit(&grafdat);
2278   PetscFunctionReturn(0);
2279 }
2280 
2281 #endif /* PETSC_HAVE_PTSCOTCH */
2282 
2283 static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
2284 {
2285   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
2286   PetscErrorCode             ierr;
2287 
2288   PetscFunctionBegin;
2289   ierr = MPI_Comm_free(&p->pcomm);CHKERRQ(ierr);
2290   ierr = PetscFree(p);CHKERRQ(ierr);
2291   PetscFunctionReturn(0);
2292 }
2293 
2294 static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer)
2295 {
2296   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
2297   PetscErrorCode            ierr;
2298 
2299   PetscFunctionBegin;
2300   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
2301   ierr = PetscViewerASCIIPrintf(viewer,"using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);CHKERRQ(ierr);
2302   ierr = PetscViewerASCIIPrintf(viewer,"using load imbalance ratio %g\n",(double)p->imbalance);CHKERRQ(ierr);
2303   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
2304   PetscFunctionReturn(0);
2305 }
2306 
2307 static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
2308 {
2309   PetscBool      iascii;
2310   PetscErrorCode ierr;
2311 
2312   PetscFunctionBegin;
2313   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
2314   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
2315   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
2316   if (iascii) {ierr = PetscPartitionerView_PTScotch_Ascii(part, viewer);CHKERRQ(ierr);}
2317   PetscFunctionReturn(0);
2318 }
2319 
2320 static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
2321 {
2322   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
2323   const char *const         *slist = PTScotchStrategyList;
2324   PetscInt                  nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0]));
2325   PetscBool                 flag;
2326   PetscErrorCode            ierr;
2327 
2328   PetscFunctionBegin;
2329   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");CHKERRQ(ierr);
2330   ierr = PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);CHKERRQ(ierr);
2331   ierr = PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);CHKERRQ(ierr);
2332   ierr = PetscOptionsTail();CHKERRQ(ierr);
2333   PetscFunctionReturn(0);
2334 }
2335 
2336 static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
2337 {
2338 #if defined(PETSC_HAVE_PTSCOTCH)
2339   MPI_Comm       comm;
2340   PetscInt       nvtxs = numVertices;   /* The number of vertices in full graph */
2341   PetscInt       *vtxdist;              /* Distribution of vertices across processes */
2342   PetscInt       *xadj   = start;       /* Start of edge list for each vertex */
2343   PetscInt       *adjncy = adjacency;   /* Edge lists for all vertices */
2344   PetscInt       *vwgt   = NULL;        /* Vertex weights */
2345   PetscInt       *adjwgt = NULL;        /* Edge weights */
2346   PetscInt       v, i, *assignment, *points;
2347   PetscMPIInt    size, rank, p;
2348   PetscBool      hasempty = PETSC_FALSE;
2349   PetscInt       *tpwgts = NULL;
2350   PetscErrorCode ierr;
2351 
2352   PetscFunctionBegin;
2353   ierr = PetscObjectGetComm((PetscObject)part,&comm);CHKERRQ(ierr);
2354   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2355   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2356   ierr = PetscMalloc2(size+1,&vtxdist,PetscMax(nvtxs,1),&assignment);CHKERRQ(ierr);
2357   /* Calculate vertex distribution */
2358   vtxdist[0] = 0;
2359   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
2360   for (p = 2; p <= size; ++p) {
2361     hasempty = (PetscBool)(hasempty || !vtxdist[p-1] || !vtxdist[p]);
2362     vtxdist[p] += vtxdist[p-1];
2363   }
2364   /* null graph */
2365   if (vtxdist[size] == 0) {
2366     ierr = PetscFree2(vtxdist, assignment);CHKERRQ(ierr);
2367     ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
2368     PetscFunctionReturn(0);
2369   }
2370 
2371   /* Calculate vertex weights */
2372   if (vertSection) {
2373     ierr = PetscMalloc1(nvtxs,&vwgt);CHKERRQ(ierr);
2374     for (v = 0; v < nvtxs; ++v) {
2375       ierr = PetscSectionGetDof(vertSection, v, &vwgt[v]);CHKERRQ(ierr);
2376     }
2377   }
2378 
2379   /* Calculate partition weights */
2380   if (targetSection) {
2381     PetscInt sumw;
2382 
2383     ierr = PetscCalloc1(nparts,&tpwgts);CHKERRQ(ierr);
2384     for (p = 0, sumw = 0; p < nparts; ++p) {
2385       ierr = PetscSectionGetDof(targetSection,p,&tpwgts[p]);CHKERRQ(ierr);
2386       sumw += tpwgts[p];
2387     }
2388     if (!sumw) {
2389       ierr = PetscFree(tpwgts);CHKERRQ(ierr);
2390     }
2391   }
2392 
2393   {
2394     PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
2395     int                       strat = PTScotch_Strategy(pts->strategy);
2396     double                    imbal = (double)pts->imbalance;
2397 
2398     for (p = 0; !vtxdist[p+1] && p < size; ++p);
2399     if (vtxdist[p+1] == vtxdist[size]) {
2400       if (rank == p) {
2401         ierr = PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, assignment);CHKERRQ(ierr);
2402       }
2403     } else {
2404       PetscInt cnt;
2405       MPI_Comm pcomm;
2406 
2407       if (hasempty) {
2408         ierr = MPI_Comm_split(pts->pcomm,!!nvtxs,rank,&pcomm);CHKERRQ(ierr);
2409         for (p=0,cnt=0;p<size;p++) {
2410           if (vtxdist[p+1] != vtxdist[p]) {
2411             vtxdist[cnt+1] = vtxdist[p+1];
2412             cnt++;
2413           }
2414         }
2415       } else pcomm = pts->pcomm;
2416       if (nvtxs) {
2417         ierr = PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, assignment, pcomm);CHKERRQ(ierr);
2418       }
2419       if (hasempty) {
2420         ierr = MPI_Comm_free(&pcomm);CHKERRQ(ierr);
2421       }
2422     }
2423   }
2424   ierr = PetscFree(vwgt);CHKERRQ(ierr);
2425   ierr = PetscFree(tpwgts);CHKERRQ(ierr);
2426 
2427   /* Convert to PetscSection+IS */
2428   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
2429   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
2430   for (p = 0, i = 0; p < nparts; ++p) {
2431     for (v = 0; v < nvtxs; ++v) {
2432       if (assignment[v] == p) points[i++] = v;
2433     }
2434   }
2435   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
2436   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
2437 
2438   ierr = PetscFree2(vtxdist,assignment);CHKERRQ(ierr);
2439   PetscFunctionReturn(0);
2440 #else
2441   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
2442 #endif
2443 }
2444 
2445 static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
2446 {
2447   PetscFunctionBegin;
2448   part->noGraph             = PETSC_FALSE;
2449   part->ops->view           = PetscPartitionerView_PTScotch;
2450   part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
2451   part->ops->partition      = PetscPartitionerPartition_PTScotch;
2452   part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
2453   PetscFunctionReturn(0);
2454 }
2455 
2456 /*MC
2457   PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library
2458 
2459   Level: intermediate
2460 
2461   Options Database Keys:
2462 +  -petscpartitioner_ptscotch_strategy <string> - PT-Scotch strategy. Choose one of default quality speed balance safety scalability recursive remap
2463 -  -petscpartitioner_ptscotch_imbalance <val> - Load imbalance ratio
2464 
2465   Notes: when the graph is on a single process, this partitioner actually uses Scotch and not PT-Scotch
2466 
2467 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
2468 M*/
2469 
2470 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
2471 {
2472   PetscPartitioner_PTScotch *p;
2473   PetscErrorCode          ierr;
2474 
2475   PetscFunctionBegin;
2476   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
2477   ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
2478   part->data = p;
2479 
2480   ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)part),&p->pcomm);CHKERRQ(ierr);
2481   p->strategy  = 0;
2482   p->imbalance = 0.01;
2483 
2484   ierr = PetscPartitionerInitialize_PTScotch(part);CHKERRQ(ierr);
2485   ierr = PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);CHKERRQ(ierr);
2486   PetscFunctionReturn(0);
2487 }
2488 
2489 /*@
2490   DMPlexGetPartitioner - Get the mesh partitioner
2491 
2492   Not collective
2493 
2494   Input Parameter:
2495 . dm - The DM
2496 
2497   Output Parameter:
2498 . part - The PetscPartitioner
2499 
2500   Level: developer
2501 
2502   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
2503 
.seealso: DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerDMPlexPartition(), PetscPartitionerCreate()
2505 @*/
2506 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
2507 {
2508   DM_Plex       *mesh = (DM_Plex *) dm->data;
2509 
2510   PetscFunctionBegin;
2511   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2512   PetscValidPointer(part, 2);
2513   *part = mesh->partitioner;
2514   PetscFunctionReturn(0);
2515 }
2516 
2517 /*@
2518   DMPlexSetPartitioner - Set the mesh partitioner
2519 
2520   logically collective on DM
2521 
2522   Input Parameters:
2523 + dm - The DM
2524 - part - The partitioner
2525 
2526   Level: developer
2527 
2528   Note: Any existing PetscPartitioner will be destroyed.
2529 
.seealso: DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
2531 @*/
2532 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
2533 {
2534   DM_Plex       *mesh = (DM_Plex *) dm->data;
2535   PetscErrorCode ierr;
2536 
2537   PetscFunctionBegin;
2538   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2539   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
2540   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
2541   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
2542   mesh->partitioner = part;
2543   PetscFunctionReturn(0);
2544 }
2545 
2546 static PetscErrorCode DMPlexAddClosure_Private(DM dm, PetscHSetI ht, PetscInt point)
2547 {
2548   const PetscInt *cone;
2549   PetscInt       coneSize, c;
2550   PetscBool      missing;
2551   PetscErrorCode ierr;
2552 
2553   PetscFunctionBeginHot;
2554   ierr = PetscHSetIQueryAdd(ht, point, &missing);CHKERRQ(ierr);
2555   if (missing) {
2556     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
2557     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
2558     for (c = 0; c < coneSize; c++) {
2559       ierr = DMPlexAddClosure_Private(dm, ht, cone[c]);CHKERRQ(ierr);
2560     }
2561   }
2562   PetscFunctionReturn(0);
2563 }
2564 
/*
  DMPlexAddClosure_Tree - Add to ht the tree-related closure of point: the closures of
  its tree ancestors (when up is true) and the subtrees rooted at its children (when
  down is true).

  Marked PETSC_UNUSED: it is referenced only from the disabled #else branch in
  DMPlexClosurePoints_Private; the active code path uses the
  DMPlexAddClosureTree_*_Private helpers instead.
*/
PETSC_UNUSED static PetscErrorCode DMPlexAddClosure_Tree(DM dm, PetscHSetI ht, PetscInt point, PetscBool up, PetscBool down)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (up) {
    PetscInt parent;

    ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
    if (parent != point) { /* a point with no parent is its own parent */
      PetscInt closureSize, *closure = NULL, i;

      /* Add the parent's full closure, recursing further up from each of its points */
      ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
      for (i = 0; i < closureSize; i++) {
        PetscInt cpoint = closure[2*i]; /* closure stores (point, orientation) pairs */

        ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
        ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
      }
      ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
    }
  }
  if (down) {
    PetscInt numChildren;
    const PetscInt *children;

    ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
    if (numChildren) {
      PetscInt i;

      /* Add each child and recurse down through its own children */
      for (i = 0; i < numChildren; i++) {
        PetscInt cpoint = children[i];

        ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
        ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}
2605 
2606 static PetscErrorCode DMPlexAddClosureTree_Up_Private(DM dm, PetscHSetI ht, PetscInt point)
2607 {
2608   PetscInt       parent;
2609   PetscErrorCode ierr;
2610 
2611   PetscFunctionBeginHot;
2612   ierr = DMPlexGetTreeParent(dm, point, &parent,NULL);CHKERRQ(ierr);
2613   if (point != parent) {
2614     const PetscInt *cone;
2615     PetscInt       coneSize, c;
2616 
2617     ierr = DMPlexAddClosureTree_Up_Private(dm, ht, parent);CHKERRQ(ierr);
2618     ierr = DMPlexAddClosure_Private(dm, ht, parent);CHKERRQ(ierr);
2619     ierr = DMPlexGetCone(dm, parent, &cone);CHKERRQ(ierr);
2620     ierr = DMPlexGetConeSize(dm, parent, &coneSize);CHKERRQ(ierr);
2621     for (c = 0; c < coneSize; c++) {
2622       const PetscInt cp = cone[c];
2623 
2624       ierr = DMPlexAddClosureTree_Up_Private(dm, ht, cp);CHKERRQ(ierr);
2625     }
2626   }
2627   PetscFunctionReturn(0);
2628 }
2629 
2630 static PetscErrorCode DMPlexAddClosureTree_Down_Private(DM dm, PetscHSetI ht, PetscInt point)
2631 {
2632   PetscInt       i, numChildren;
2633   const PetscInt *children;
2634   PetscErrorCode ierr;
2635 
2636   PetscFunctionBeginHot;
2637   ierr = DMPlexGetTreeChildren(dm, point, &numChildren, &children);CHKERRQ(ierr);
2638   for (i = 0; i < numChildren; i++) {
2639     ierr = PetscHSetIAdd(ht, children[i]);CHKERRQ(ierr);
2640   }
2641   PetscFunctionReturn(0);
2642 }
2643 
2644 static PetscErrorCode DMPlexAddClosureTree_Private(DM dm, PetscHSetI ht, PetscInt point)
2645 {
2646   const PetscInt *cone;
2647   PetscInt       coneSize, c;
2648   PetscErrorCode ierr;
2649 
2650   PetscFunctionBeginHot;
2651   ierr = PetscHSetIAdd(ht, point);CHKERRQ(ierr);
2652   ierr = DMPlexAddClosureTree_Up_Private(dm, ht, point);CHKERRQ(ierr);
2653   ierr = DMPlexAddClosureTree_Down_Private(dm, ht, point);CHKERRQ(ierr);
2654   ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
2655   ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
2656   for (c = 0; c < coneSize; c++) {
2657     ierr = DMPlexAddClosureTree_Private(dm, ht, cone[c]);CHKERRQ(ierr);
2658   }
2659   PetscFunctionReturn(0);
2660 }
2661 
/*
  DMPlexClosurePoints_Private - Compute the union of the transitive closures of the
  given points, returned as a sorted IS.

  Input Parameters:
+ dm        - The DM
. numPoints - Number of input points
- points    - The input points

  Output Parameter:
. closureIS - Sorted IS holding every point in the closure of the inputs; when the
              mesh has a refinement tree, tree ancestors and children are included
*/
PetscErrorCode DMPlexClosurePoints_Private(DM dm, PetscInt numPoints, const PetscInt points[], IS *closureIS)
{
  DM_Plex         *mesh = (DM_Plex *)dm->data;
  const PetscBool hasTree = (mesh->parentSection || mesh->childSection) ? PETSC_TRUE : PETSC_FALSE;
  PetscInt        nelems, *elems, off = 0, p;
  PetscHSetI      ht;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
  /* Preallocate assuming roughly 16 closure points per input point (heuristic) */
  ierr = PetscHSetIResize(ht, numPoints*16);CHKERRQ(ierr);
  if (!hasTree) {
    for (p = 0; p < numPoints; ++p) {
      ierr = DMPlexAddClosure_Private(dm, ht, points[p]);CHKERRQ(ierr);
    }
  } else {
#if 1
    for (p = 0; p < numPoints; ++p) {
      ierr = DMPlexAddClosureTree_Private(dm, ht, points[p]);CHKERRQ(ierr);
    }
#else
    /* Disabled alternative built on DMPlexGetTransitiveClosure + DMPlexAddClosure_Tree */
    PetscInt  *closure = NULL, closureSize, c;
    for (p = 0; p < numPoints; ++p) {
      ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      for (c = 0; c < closureSize*2; c += 2) {
        ierr = PetscHSetIAdd(ht, closure[c]);CHKERRQ(ierr);
        if (hasTree) {ierr = DMPlexAddClosure_Tree(dm, ht, closure[c], PETSC_TRUE, PETSC_TRUE);CHKERRQ(ierr);}
      }
    }
    if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure);CHKERRQ(ierr);}
#endif
  }
  /* Extract the set elements, sort them, and hand ownership of the array to the IS */
  ierr = PetscHSetIGetSize(ht, &nelems);CHKERRQ(ierr);
  ierr = PetscMalloc1(nelems, &elems);CHKERRQ(ierr);
  ierr = PetscHSetIGetElems(ht, &off, elems);CHKERRQ(ierr);
  ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
  ierr = PetscSortInt(nelems, elems);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF, nelems, elems, PETSC_OWN_POINTER, closureIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2702 
2703 /*@
2704   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
2705 
2706   Input Parameters:
2707 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2709 
2710   Level: developer
2711 
2712 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2713 @*/
2714 PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
2715 {
2716   IS              rankIS,   pointIS, closureIS;
2717   const PetscInt *ranks,   *points;
2718   PetscInt        numRanks, numPoints, r;
2719   PetscErrorCode  ierr;
2720 
2721   PetscFunctionBegin;
2722   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2723   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2724   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2725   for (r = 0; r < numRanks; ++r) {
2726     const PetscInt rank = ranks[r];
2727     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2728     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2729     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2730     ierr = DMPlexClosurePoints_Private(dm, numPoints, points, &closureIS);CHKERRQ(ierr);
2731     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2732     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2733     ierr = DMLabelSetStratumIS(label, rank, closureIS);CHKERRQ(ierr);
2734     ierr = ISDestroy(&closureIS);CHKERRQ(ierr);
2735   }
2736   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2737   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2738   PetscFunctionReturn(0);
2739 }
2740 
2741 /*@
2742   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
2743 
2744   Input Parameters:
2745 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2747 
2748   Level: developer
2749 
2750 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2751 @*/
2752 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
2753 {
2754   IS              rankIS,   pointIS;
2755   const PetscInt *ranks,   *points;
2756   PetscInt        numRanks, numPoints, r, p, a, adjSize;
2757   PetscInt       *adj = NULL;
2758   PetscErrorCode  ierr;
2759 
2760   PetscFunctionBegin;
2761   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2762   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2763   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2764   for (r = 0; r < numRanks; ++r) {
2765     const PetscInt rank = ranks[r];
2766 
2767     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2768     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2769     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2770     for (p = 0; p < numPoints; ++p) {
2771       adjSize = PETSC_DETERMINE;
2772       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
2773       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
2774     }
2775     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2776     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2777   }
2778   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2779   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2780   ierr = PetscFree(adj);CHKERRQ(ierr);
2781   PetscFunctionReturn(0);
2782 }
2783 
2784 /*@
2785   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
2786 
2787   Input Parameters:
2788 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2790 
2791   Level: developer
2792 
2793   Note: This is required when generating multi-level overlaps to capture
2794   overlap points from non-neighbouring partitions.
2795 
2796 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2797 @*/
2798 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
2799 {
2800   MPI_Comm        comm;
2801   PetscMPIInt     rank;
2802   PetscSF         sfPoint;
2803   DMLabel         lblRoots, lblLeaves;
2804   IS              rankIS, pointIS;
2805   const PetscInt *ranks;
2806   PetscInt        numRanks, r;
2807   PetscErrorCode  ierr;
2808 
2809   PetscFunctionBegin;
2810   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2811   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2812   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
2813   /* Pull point contributions from remote leaves into local roots */
2814   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
2815   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
2816   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2817   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2818   for (r = 0; r < numRanks; ++r) {
2819     const PetscInt remoteRank = ranks[r];
2820     if (remoteRank == rank) continue;
2821     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
2822     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2823     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2824   }
2825   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2826   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2827   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
2828   /* Push point contributions from roots into remote leaves */
2829   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
2830   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
2831   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2832   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2833   for (r = 0; r < numRanks; ++r) {
2834     const PetscInt remoteRank = ranks[r];
2835     if (remoteRank == rank) continue;
2836     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
2837     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2838     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2839   }
2840   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2841   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2842   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
2843   PetscFunctionReturn(0);
2844 }
2845 
2846 /*@
2847   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
2848 
2849   Input Parameters:
2850 + dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
2852 - processSF - A star forest mapping into the local index on each remote rank
2853 
2854   Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots
2856 
2857   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
2858   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
2859 
2860   Level: developer
2861 
2862 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2863 @*/
2864 PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
2865 {
2866   MPI_Comm           comm;
2867   PetscMPIInt        rank, size, r;
2868   PetscInt           p, n, numNeighbors, numPoints, dof, off, rootSize, l, nleaves, leafSize;
2869   PetscSF            sfPoint;
2870   PetscSection       rootSection;
2871   PetscSFNode       *rootPoints, *leafPoints;
2872   const PetscSFNode *remote;
2873   const PetscInt    *local, *neighbors;
2874   IS                 valueIS;
2875   PetscBool          mpiOverflow = PETSC_FALSE;
2876   PetscErrorCode     ierr;
2877 
2878   PetscFunctionBegin;
2879   ierr = PetscLogEventBegin(DMPLEX_PartLabelInvert,dm,0,0,0);CHKERRQ(ierr);
2880   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2881   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2882   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2883   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
2884 
2885   /* Convert to (point, rank) and use actual owners */
2886   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
2887   ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
2888   ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
2889   ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
2890   ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
2891   for (n = 0; n < numNeighbors; ++n) {
2892     ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
2893     ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
2894   }
2895   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
2896   ierr = PetscSectionGetStorageSize(rootSection, &rootSize);CHKERRQ(ierr);
2897   ierr = PetscMalloc1(rootSize, &rootPoints);CHKERRQ(ierr);
2898   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
2899   for (n = 0; n < numNeighbors; ++n) {
2900     IS              pointIS;
2901     const PetscInt *points;
2902 
2903     ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
2904     ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
2905     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2906     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2907     for (p = 0; p < numPoints; ++p) {
2908       if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
2909       else       {l = -1;}
2910       if (l >= 0) {rootPoints[off+p] = remote[l];}
2911       else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
2912     }
2913     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2914     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2915   }
2916 
2917   /* Try to communicate overlap using All-to-All */
2918   if (!processSF) {
2919     PetscInt64  counter = 0;
2920     PetscBool   locOverflow = PETSC_FALSE;
2921     PetscMPIInt *scounts, *sdispls, *rcounts, *rdispls;
2922 
2923     ierr = PetscCalloc4(size, &scounts, size, &sdispls, size, &rcounts, size, &rdispls);CHKERRQ(ierr);
2924     for (n = 0; n < numNeighbors; ++n) {
2925       ierr = PetscSectionGetDof(rootSection, neighbors[n], &dof);CHKERRQ(ierr);
2926       ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
2927 #if defined(PETSC_USE_64BIT_INDICES)
2928       if (dof > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
2929       if (off > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
2930 #endif
2931       scounts[neighbors[n]] = (PetscMPIInt) dof;
2932       sdispls[neighbors[n]] = (PetscMPIInt) off;
2933     }
2934     ierr = MPI_Alltoall(scounts, 1, MPI_INT, rcounts, 1, MPI_INT, comm);CHKERRQ(ierr);
2935     for (r = 0; r < size; ++r) { rdispls[r] = (int)counter; counter += rcounts[r]; }
2936     if (counter > PETSC_MPI_INT_MAX) locOverflow = PETSC_TRUE;
2937     ierr = MPI_Allreduce(&locOverflow, &mpiOverflow, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
2938     if (!mpiOverflow) {
2939       ierr = PetscInfo(dm,"Using Alltoallv for mesh distribution\n");CHKERRQ(ierr);
2940       leafSize = (PetscInt) counter;
2941       ierr = PetscMalloc1(leafSize, &leafPoints);CHKERRQ(ierr);
2942       ierr = MPI_Alltoallv(rootPoints, scounts, sdispls, MPIU_2INT, leafPoints, rcounts, rdispls, MPIU_2INT, comm);CHKERRQ(ierr);
2943     }
2944     ierr = PetscFree4(scounts, sdispls, rcounts, rdispls);CHKERRQ(ierr);
2945   }
2946 
2947   /* Communicate overlap using process star forest */
2948   if (processSF || mpiOverflow) {
2949     PetscSF      procSF;
2950     PetscSection leafSection;
2951 
2952     if (processSF) {
2953       ierr = PetscInfo(dm,"Using processSF for mesh distribution\n");CHKERRQ(ierr);
2954       ierr = PetscObjectReference((PetscObject)processSF);CHKERRQ(ierr);
2955       procSF = processSF;
2956     } else {
2957       ierr = PetscInfo(dm,"Using processSF for mesh distribution (MPI overflow)\n");CHKERRQ(ierr);
2958       ierr = PetscSFCreate(comm,&procSF);CHKERRQ(ierr);
2959       ierr = PetscSFSetGraphWithPattern(procSF,NULL,PETSCSF_PATTERN_ALLTOALL);CHKERRQ(ierr);
2960     }
2961 
2962     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dm), &leafSection);CHKERRQ(ierr);
2963     ierr = DMPlexDistributeData(dm, procSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
2964     ierr = PetscSectionGetStorageSize(leafSection, &leafSize);CHKERRQ(ierr);
2965     ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
2966     ierr = PetscSFDestroy(&procSF);CHKERRQ(ierr);
2967   }
2968 
2969   for (p = 0; p < leafSize; p++) {
2970     ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
2971   }
2972 
2973   ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
2974   ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
2975   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
2976   ierr = PetscFree(rootPoints);CHKERRQ(ierr);
2977   ierr = PetscFree(leafPoints);CHKERRQ(ierr);
2978   ierr = PetscLogEventEnd(DMPLEX_PartLabelInvert,dm,0,0,0);CHKERRQ(ierr);
2979   PetscFunctionReturn(0);
2980 }
2981 
2982 /*@
2983   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
2984 
2985   Input Parameters:
2986 + dm    - The DM
2987 - label - DMLabel assinging ranks to remote roots
2988 
2989   Output Parameter:
2990 . sf    - The star forest communication context encapsulating the defined mapping
2991 
2992   Note: The incoming label is a receiver mapping of remote points to their parent rank.
2993 
2994   Level: developer
2995 
2996 .seealso: DMPlexDistribute(), DMPlexCreateOverlap()
2997 @*/
2998 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
2999 {
3000   PetscMPIInt     rank;
3001   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0, nNeighbors;
3002   PetscSFNode    *remotePoints;
3003   IS              remoteRootIS, neighborsIS;
3004   const PetscInt *remoteRoots, *neighbors;
3005   PetscErrorCode ierr;
3006 
3007   PetscFunctionBegin;
3008   ierr = PetscLogEventBegin(DMPLEX_PartLabelCreateSF,dm,0,0,0);CHKERRQ(ierr);
3009   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
3010 
3011   ierr = DMLabelGetValueIS(label, &neighborsIS);CHKERRQ(ierr);
3012 #if 0
3013   {
3014     IS is;
3015     ierr = ISDuplicate(neighborsIS, &is);CHKERRQ(ierr);
3016     ierr = ISSort(is);CHKERRQ(ierr);
3017     ierr = ISDestroy(&neighborsIS);CHKERRQ(ierr);
3018     neighborsIS = is;
3019   }
3020 #endif
3021   ierr = ISGetLocalSize(neighborsIS, &nNeighbors);CHKERRQ(ierr);
3022   ierr = ISGetIndices(neighborsIS, &neighbors);CHKERRQ(ierr);
3023   for (numRemote = 0, n = 0; n < nNeighbors; ++n) {
3024     ierr = DMLabelGetStratumSize(label, neighbors[n], &numPoints);CHKERRQ(ierr);
3025     numRemote += numPoints;
3026   }
3027   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
3028   /* Put owned points first */
3029   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
3030   if (numPoints > 0) {
3031     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
3032     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
3033     for (p = 0; p < numPoints; p++) {
3034       remotePoints[idx].index = remoteRoots[p];
3035       remotePoints[idx].rank = rank;
3036       idx++;
3037     }
3038     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
3039     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
3040   }
3041   /* Now add remote points */
3042   for (n = 0; n < nNeighbors; ++n) {
3043     const PetscInt nn = neighbors[n];
3044 
3045     ierr = DMLabelGetStratumSize(label, nn, &numPoints);CHKERRQ(ierr);
3046     if (nn == rank || numPoints <= 0) continue;
3047     ierr = DMLabelGetStratumIS(label, nn, &remoteRootIS);CHKERRQ(ierr);
3048     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
3049     for (p = 0; p < numPoints; p++) {
3050       remotePoints[idx].index = remoteRoots[p];
3051       remotePoints[idx].rank = nn;
3052       idx++;
3053     }
3054     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
3055     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
3056   }
3057   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
3058   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
3059   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
3060   ierr = PetscSFSetUp(*sf);CHKERRQ(ierr);
3061   ierr = ISDestroy(&neighborsIS);CHKERRQ(ierr);
3062   ierr = PetscLogEventEnd(DMPLEX_PartLabelCreateSF,dm,0,0,0);CHKERRQ(ierr);
3063   PetscFunctionReturn(0);
3064 }
3065 
3066 /* The two functions below are used by DMPlexRebalanceSharedPoints which errors
3067  * when PETSc is built without ParMETIS. To avoid -Wunused-function, we take
3068  * them out in that case. */
3069 #if defined(PETSC_HAVE_PARMETIS)
3070 /*@C
3071 
3072   DMPlexRewriteSF - Rewrites the ownership of the SF of a DM (in place).
3073 
3074   Input parameters:
3075 + dm                - The DMPlex object.
3076 . n                 - The number of points.
3077 . pointsToRewrite   - The points in the SF whose ownership will change.
3078 . targetOwners      - New owner for each element in pointsToRewrite.
3079 - degrees           - Degrees of the points in the SF as obtained by PetscSFComputeDegreeBegin/PetscSFComputeDegreeEnd.
3080 
3081   Level: developer
3082 
3083 @*/
3084 static PetscErrorCode DMPlexRewriteSF(DM dm, PetscInt n, PetscInt *pointsToRewrite, PetscInt *targetOwners, const PetscInt *degrees)
3085 {
3086   PetscInt      ierr, pStart, pEnd, i, j, counter, leafCounter, sumDegrees, nroots, nleafs;
3087   PetscInt     *cumSumDegrees, *newOwners, *newNumbers, *rankOnLeafs, *locationsOfLeafs, *remoteLocalPointOfLeafs, *points, *leafsNew;
3088   PetscSFNode  *leafLocationsNew;
3089   const         PetscSFNode *iremote;
3090   const         PetscInt *ilocal;
3091   PetscBool    *isLeaf;
3092   PetscSF       sf;
3093   MPI_Comm      comm;
3094   PetscMPIInt   rank, size;
3095 
3096   PetscFunctionBegin;
3097   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
3098   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
3099   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
3100   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
3101 
3102   ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
3103   ierr = PetscSFGetGraph(sf, &nroots, &nleafs, &ilocal, &iremote); CHKERRQ(ierr);
3104   ierr = PetscMalloc1(pEnd-pStart, &isLeaf);CHKERRQ(ierr);
3105   for (i=0; i<pEnd-pStart; i++) {
3106     isLeaf[i] = PETSC_FALSE;
3107   }
3108   for (i=0; i<nleafs; i++) {
3109     isLeaf[ilocal[i]-pStart] = PETSC_TRUE;
3110   }
3111 
3112   ierr = PetscMalloc1(pEnd-pStart+1, &cumSumDegrees);CHKERRQ(ierr);
3113   cumSumDegrees[0] = 0;
3114   for (i=1; i<=pEnd-pStart; i++) {
3115     cumSumDegrees[i] = cumSumDegrees[i-1] + degrees[i-1];
3116   }
3117   sumDegrees = cumSumDegrees[pEnd-pStart];
3118   /* get the location of my leafs (we have sumDegrees many leafs pointing at our roots) */
3119 
3120   ierr = PetscMalloc1(sumDegrees, &locationsOfLeafs);CHKERRQ(ierr);
3121   ierr = PetscMalloc1(pEnd-pStart, &rankOnLeafs);CHKERRQ(ierr);
3122   for (i=0; i<pEnd-pStart; i++) {
3123     rankOnLeafs[i] = rank;
3124   }
3125   ierr = PetscSFGatherBegin(sf, MPIU_INT, rankOnLeafs, locationsOfLeafs);CHKERRQ(ierr);
3126   ierr = PetscSFGatherEnd(sf, MPIU_INT, rankOnLeafs, locationsOfLeafs);CHKERRQ(ierr);
3127   ierr = PetscFree(rankOnLeafs);CHKERRQ(ierr);
3128 
3129   /* get the remote local points of my leaves */
3130   ierr = PetscMalloc1(sumDegrees, &remoteLocalPointOfLeafs);CHKERRQ(ierr);
3131   ierr = PetscMalloc1(pEnd-pStart, &points);CHKERRQ(ierr);
3132   for (i=0; i<pEnd-pStart; i++) {
3133     points[i] = pStart+i;
3134   }
3135   ierr = PetscSFGatherBegin(sf, MPIU_INT, points, remoteLocalPointOfLeafs);CHKERRQ(ierr);
3136   ierr = PetscSFGatherEnd(sf, MPIU_INT, points, remoteLocalPointOfLeafs);CHKERRQ(ierr);
3137   ierr = PetscFree(points);CHKERRQ(ierr);
3138   /* Figure out the new owners of the vertices that are up for grabs and their numbers on the new owners */
3139   ierr = PetscMalloc1(pEnd-pStart, &newOwners);CHKERRQ(ierr);
3140   ierr = PetscMalloc1(pEnd-pStart, &newNumbers);CHKERRQ(ierr);
3141   for (i=0; i<pEnd-pStart; i++) {
3142     newOwners[i] = -1;
3143     newNumbers[i] = -1;
3144   }
3145   {
3146     PetscInt oldNumber, newNumber, oldOwner, newOwner;
3147     for (i=0; i<n; i++) {
3148       oldNumber = pointsToRewrite[i];
3149       newNumber = -1;
3150       oldOwner = rank;
3151       newOwner = targetOwners[i];
3152       if (oldOwner == newOwner) {
3153         newNumber = oldNumber;
3154       } else {
3155         for (j=0; j<degrees[oldNumber]; j++) {
3156           if (locationsOfLeafs[cumSumDegrees[oldNumber]+j] == newOwner) {
3157             newNumber = remoteLocalPointOfLeafs[cumSumDegrees[oldNumber]+j];
3158             break;
3159           }
3160         }
3161       }
3162       if (newNumber == -1) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Couldn't find the new owner of vertex.");
3163 
3164       newOwners[oldNumber] = newOwner;
3165       newNumbers[oldNumber] = newNumber;
3166     }
3167   }
3168   ierr = PetscFree(cumSumDegrees);CHKERRQ(ierr);
3169   ierr = PetscFree(locationsOfLeafs);CHKERRQ(ierr);
3170   ierr = PetscFree(remoteLocalPointOfLeafs);CHKERRQ(ierr);
3171 
3172   ierr = PetscSFBcastBegin(sf, MPIU_INT, newOwners, newOwners);CHKERRQ(ierr);
3173   ierr = PetscSFBcastEnd(sf, MPIU_INT, newOwners, newOwners);CHKERRQ(ierr);
3174   ierr = PetscSFBcastBegin(sf, MPIU_INT, newNumbers, newNumbers);CHKERRQ(ierr);
3175   ierr = PetscSFBcastEnd(sf, MPIU_INT, newNumbers, newNumbers);CHKERRQ(ierr);
3176 
3177   /* Now count how many leafs we have on each processor. */
3178   leafCounter=0;
3179   for (i=0; i<pEnd-pStart; i++) {
3180     if (newOwners[i] >= 0) {
3181       if (newOwners[i] != rank) {
3182         leafCounter++;
3183       }
3184     } else {
3185       if (isLeaf[i]) {
3186         leafCounter++;
3187       }
3188     }
3189   }
3190 
3191   /* Now set up the new sf by creating the leaf arrays */
3192   ierr = PetscMalloc1(leafCounter, &leafsNew);CHKERRQ(ierr);
3193   ierr = PetscMalloc1(leafCounter, &leafLocationsNew);CHKERRQ(ierr);
3194 
3195   leafCounter = 0;
3196   counter = 0;
3197   for (i=0; i<pEnd-pStart; i++) {
3198     if (newOwners[i] >= 0) {
3199       if (newOwners[i] != rank) {
3200         leafsNew[leafCounter] = i;
3201         leafLocationsNew[leafCounter].rank = newOwners[i];
3202         leafLocationsNew[leafCounter].index = newNumbers[i];
3203         leafCounter++;
3204       }
3205     } else {
3206       if (isLeaf[i]) {
3207         leafsNew[leafCounter] = i;
3208         leafLocationsNew[leafCounter].rank = iremote[counter].rank;
3209         leafLocationsNew[leafCounter].index = iremote[counter].index;
3210         leafCounter++;
3211       }
3212     }
3213     if (isLeaf[i]) {
3214       counter++;
3215     }
3216   }
3217 
3218   ierr = PetscSFSetGraph(sf, nroots, leafCounter, leafsNew, PETSC_OWN_POINTER, leafLocationsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
3219   ierr = PetscFree(newOwners);CHKERRQ(ierr);
3220   ierr = PetscFree(newNumbers);CHKERRQ(ierr);
3221   ierr = PetscFree(isLeaf);CHKERRQ(ierr);
3222   PetscFunctionReturn(0);
3223 }
3224 
3225 static PetscErrorCode DMPlexViewDistribution(MPI_Comm comm, PetscInt n, PetscInt skip, PetscInt *vtxwgt, PetscInt *part, PetscViewer viewer)
3226 {
3227   PetscInt *distribution, min, max, sum, i, ierr;
3228   PetscMPIInt rank, size;
3229   PetscFunctionBegin;
3230   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
3231   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
3232   ierr = PetscCalloc1(size, &distribution);CHKERRQ(ierr);
3233   for (i=0; i<n; i++) {
3234     if (part) distribution[part[i]] += vtxwgt[skip*i];
3235     else distribution[rank] += vtxwgt[skip*i];
3236   }
3237   ierr = MPI_Allreduce(MPI_IN_PLACE, distribution, size, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
3238   min = distribution[0];
3239   max = distribution[0];
3240   sum = distribution[0];
3241   for (i=1; i<size; i++) {
3242     if (distribution[i]<min) min=distribution[i];
3243     if (distribution[i]>max) max=distribution[i];
3244     sum += distribution[i];
3245   }
3246   ierr = PetscViewerASCIIPrintf(viewer, "Min: %D, Avg: %D, Max: %D, Balance: %f\n", min, sum/size, max, (max*1.*size)/sum);CHKERRQ(ierr);
3247   ierr = PetscFree(distribution);CHKERRQ(ierr);
3248   PetscFunctionReturn(0);
3249 }
3250 
3251 #endif
3252 
3253 /*@
3254   DMPlexRebalanceSharedPoints - Redistribute points in the plex that are shared in order to achieve better balancing. This routine updates the PointSF of the DM inplace.
3255 
  Input Parameters:
+ dm               - The DMPlex object.
. entityDepth      - depth of the entity to balance (0 -> balance vertices).
. useInitialGuess  - whether to use the current distribution as initial guess (only used by ParMETIS).
- parallel         - whether to use ParMETIS and do the partition in parallel or whether to gather the graph onto a single process and use METIS.

  Output Parameter:
3263 . success          - whether the graph partitioning was successful or not. If not, try useInitialGuess=True and parallel=True.
3264 
3265   Level: intermediate
3266 
3267 @*/
3268 
3269 PetscErrorCode DMPlexRebalanceSharedPoints(DM dm, PetscInt entityDepth, PetscBool useInitialGuess, PetscBool parallel, PetscBool *success)
3270 {
3271 #if defined(PETSC_HAVE_PARMETIS)
3272   PetscSF     sf;
3273   PetscInt    ierr, i, j, idx, jdx;
3274   PetscInt    eBegin, eEnd, nroots, nleafs, pStart, pEnd;
3275   const       PetscInt *degrees, *ilocal;
3276   const       PetscSFNode *iremote;
3277   PetscBool   *toBalance, *isLeaf, *isExclusivelyOwned, *isNonExclusivelyOwned;
3278   PetscInt    numExclusivelyOwned, numNonExclusivelyOwned;
3279   PetscMPIInt rank, size;
3280   PetscInt    *globalNumbersOfLocalOwnedVertices, *leafGlobalNumbers;
3281   const       PetscInt *cumSumVertices;
3282   PetscInt    offset, counter;
3283   PetscInt    lenadjncy;
3284   PetscInt    *xadj, *adjncy, *vtxwgt;
3285   PetscInt    lenxadj;
3286   PetscInt    *adjwgt = NULL;
3287   PetscInt    *part, *options;
3288   PetscInt    nparts, wgtflag, numflag, ncon, edgecut;
3289   real_t      *ubvec;
3290   PetscInt    *firstVertices, *renumbering;
3291   PetscInt    failed, failedGlobal;
3292   MPI_Comm    comm;
3293   Mat         A, Apre;
3294   const char *prefix = NULL;
3295   PetscViewer       viewer;
3296   PetscViewerFormat format;
3297   PetscLayout layout;
3298 
3299   PetscFunctionBegin;
3300   if (success) *success = PETSC_FALSE;
3301   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
3302   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
3303   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
3304   if (size==1) PetscFunctionReturn(0);
3305 
3306   ierr = PetscLogEventBegin(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
3307 
3308   ierr = PetscOptionsGetViewer(comm,((PetscObject)dm)->options, prefix,"-dm_rebalance_partition_view",&viewer,&format,NULL);CHKERRQ(ierr);
3309   if (viewer) {
3310     ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr);
3311   }
3312 
3313   /* Figure out all points in the plex that we are interested in balancing. */
3314   ierr = DMPlexGetDepthStratum(dm, entityDepth, &eBegin, &eEnd);CHKERRQ(ierr);
3315   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
3316   ierr = PetscMalloc1(pEnd-pStart, &toBalance);CHKERRQ(ierr);
3317 
3318   for (i=0; i<pEnd-pStart; i++) {
3319     toBalance[i] = (PetscBool)(i-pStart>=eBegin && i-pStart<eEnd);
3320   }
3321 
3322   /* There are three types of points:
3323    * exclusivelyOwned: points that are owned by this process and only seen by this process
3324    * nonExclusivelyOwned: points that are owned by this process but seen by at least another process
3325    * leaf: a point that is seen by this process but owned by a different process
3326    */
3327   ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
3328   ierr = PetscSFGetGraph(sf, &nroots, &nleafs, &ilocal, &iremote); CHKERRQ(ierr);
3329   ierr = PetscMalloc1(pEnd-pStart, &isLeaf);CHKERRQ(ierr);
3330   ierr = PetscMalloc1(pEnd-pStart, &isNonExclusivelyOwned);CHKERRQ(ierr);
3331   ierr = PetscMalloc1(pEnd-pStart, &isExclusivelyOwned);CHKERRQ(ierr);
3332   for (i=0; i<pEnd-pStart; i++) {
3333     isNonExclusivelyOwned[i] = PETSC_FALSE;
3334     isExclusivelyOwned[i] = PETSC_FALSE;
3335     isLeaf[i] = PETSC_FALSE;
3336   }
3337 
3338   /* start by marking all the leafs */
3339   for (i=0; i<nleafs; i++) {
3340     isLeaf[ilocal[i]-pStart] = PETSC_TRUE;
3341   }
3342 
3343   /* for an owned point, we can figure out whether another processor sees it or
3344    * not by calculating its degree */
3345   ierr = PetscSFComputeDegreeBegin(sf, &degrees);CHKERRQ(ierr);
3346   ierr = PetscSFComputeDegreeEnd(sf, &degrees);CHKERRQ(ierr);
3347 
3348   numExclusivelyOwned = 0;
3349   numNonExclusivelyOwned = 0;
3350   for (i=0; i<pEnd-pStart; i++) {
3351     if (toBalance[i]) {
3352       if (degrees[i] > 0) {
3353         isNonExclusivelyOwned[i] = PETSC_TRUE;
3354         numNonExclusivelyOwned += 1;
3355       } else {
3356         if (!isLeaf[i]) {
3357           isExclusivelyOwned[i] = PETSC_TRUE;
3358           numExclusivelyOwned += 1;
3359         }
3360       }
3361     }
3362   }
3363 
3364   /* We are going to build a graph with one vertex per core representing the
3365    * exclusively owned points and then one vertex per nonExclusively owned
3366    * point. */
3367 
3368   ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
3369   ierr = PetscLayoutSetLocalSize(layout, 1 + numNonExclusivelyOwned);CHKERRQ(ierr);
3370   ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
3371   ierr = PetscLayoutGetRanges(layout, &cumSumVertices);CHKERRQ(ierr);
3372 
3373   ierr = PetscMalloc1(pEnd-pStart, &globalNumbersOfLocalOwnedVertices);CHKERRQ(ierr);
3374   for (i=0; i<pEnd-pStart; i++) {globalNumbersOfLocalOwnedVertices[i] = pStart - 1;}
3375   offset = cumSumVertices[rank];
3376   counter = 0;
3377   for (i=0; i<pEnd-pStart; i++) {
3378     if (toBalance[i]) {
3379       if (degrees[i] > 0) {
3380         globalNumbersOfLocalOwnedVertices[i] = counter + 1 + offset;
3381         counter++;
3382       }
3383     }
3384   }
3385 
3386   /* send the global numbers of vertices I own to the leafs so that they know to connect to it */
3387   ierr = PetscMalloc1(pEnd-pStart, &leafGlobalNumbers);CHKERRQ(ierr);
3388   ierr = PetscSFBcastBegin(sf, MPIU_INT, globalNumbersOfLocalOwnedVertices, leafGlobalNumbers);CHKERRQ(ierr);
3389   ierr = PetscSFBcastEnd(sf, MPIU_INT, globalNumbersOfLocalOwnedVertices, leafGlobalNumbers);CHKERRQ(ierr);
3390 
3391   /* Now start building the data structure for ParMETIS */
3392 
3393   ierr = MatCreate(comm, &Apre);CHKERRQ(ierr);
3394   ierr = MatSetType(Apre, MATPREALLOCATOR);CHKERRQ(ierr);
3395   ierr = MatSetSizes(Apre, 1+numNonExclusivelyOwned, 1+numNonExclusivelyOwned, cumSumVertices[size], cumSumVertices[size]);CHKERRQ(ierr);
3396   ierr = MatSetUp(Apre);CHKERRQ(ierr);
3397 
3398   for (i=0; i<pEnd-pStart; i++) {
3399     if (toBalance[i]) {
3400       idx = cumSumVertices[rank];
3401       if (isNonExclusivelyOwned[i]) jdx = globalNumbersOfLocalOwnedVertices[i];
3402       else if (isLeaf[i]) jdx = leafGlobalNumbers[i];
3403       else continue;
3404       ierr = MatSetValue(Apre, idx, jdx, 1, INSERT_VALUES);CHKERRQ(ierr);
3405       ierr = MatSetValue(Apre, jdx, idx, 1, INSERT_VALUES);CHKERRQ(ierr);
3406     }
3407   }
3408 
3409   ierr = MatAssemblyBegin(Apre, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3410   ierr = MatAssemblyEnd(Apre, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3411 
3412   ierr = MatCreate(comm, &A);CHKERRQ(ierr);
3413   ierr = MatSetType(A, MATMPIAIJ);CHKERRQ(ierr);
3414   ierr = MatSetSizes(A, 1+numNonExclusivelyOwned, 1+numNonExclusivelyOwned, cumSumVertices[size], cumSumVertices[size]);CHKERRQ(ierr);
3415   ierr = MatPreallocatorPreallocate(Apre, PETSC_FALSE, A);CHKERRQ(ierr);
3416   ierr = MatDestroy(&Apre);CHKERRQ(ierr);
3417 
3418   for (i=0; i<pEnd-pStart; i++) {
3419     if (toBalance[i]) {
3420       idx = cumSumVertices[rank];
3421       if (isNonExclusivelyOwned[i]) jdx = globalNumbersOfLocalOwnedVertices[i];
3422       else if (isLeaf[i]) jdx = leafGlobalNumbers[i];
3423       else continue;
3424       ierr = MatSetValue(A, idx, jdx, 1, INSERT_VALUES);CHKERRQ(ierr);
3425       ierr = MatSetValue(A, jdx, idx, 1, INSERT_VALUES);CHKERRQ(ierr);
3426     }
3427   }
3428 
3429   ierr = MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3430   ierr = MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3431   ierr = PetscFree(leafGlobalNumbers);CHKERRQ(ierr);
3432   ierr = PetscFree(globalNumbersOfLocalOwnedVertices);CHKERRQ(ierr);
3433 
3434   nparts = size;
3435   wgtflag = 2;
3436   numflag = 0;
3437   ncon = 2;
3438   real_t *tpwgts;
3439   ierr = PetscMalloc1(ncon * nparts, &tpwgts);CHKERRQ(ierr);
3440   for (i=0; i<ncon*nparts; i++) {
3441     tpwgts[i] = 1./(nparts);
3442   }
3443 
3444   ierr = PetscMalloc1(ncon, &ubvec);CHKERRQ(ierr);
3445   ubvec[0] = 1.01;
3446   ubvec[1] = 1.01;
3447   lenadjncy = 0;
3448   for (i=0; i<1+numNonExclusivelyOwned; i++) {
3449     PetscInt temp=0;
3450     ierr = MatGetRow(A, cumSumVertices[rank] + i, &temp, NULL, NULL);CHKERRQ(ierr);
3451     lenadjncy += temp;
3452     ierr = MatRestoreRow(A, cumSumVertices[rank] + i, &temp, NULL, NULL);CHKERRQ(ierr);
3453   }
3454   ierr = PetscMalloc1(lenadjncy, &adjncy);CHKERRQ(ierr);
3455   lenxadj = 2 + numNonExclusivelyOwned;
3456   ierr = PetscMalloc1(lenxadj, &xadj);CHKERRQ(ierr);
3457   xadj[0] = 0;
3458   counter = 0;
3459   for (i=0; i<1+numNonExclusivelyOwned; i++) {
3460     PetscInt        temp=0;
3461     const PetscInt *cols;
3462     ierr = MatGetRow(A, cumSumVertices[rank] + i, &temp, &cols, NULL);CHKERRQ(ierr);
3463     ierr = PetscArraycpy(&adjncy[counter], cols, temp);CHKERRQ(ierr);
3464     counter += temp;
3465     xadj[i+1] = counter;
3466     ierr = MatRestoreRow(A, cumSumVertices[rank] + i, &temp, &cols, NULL);CHKERRQ(ierr);
3467   }
3468 
3469   ierr = PetscMalloc1(cumSumVertices[rank+1]-cumSumVertices[rank], &part);CHKERRQ(ierr);
3470   ierr = PetscMalloc1(ncon*(1 + numNonExclusivelyOwned), &vtxwgt);CHKERRQ(ierr);
3471   vtxwgt[0] = numExclusivelyOwned;
3472   if (ncon>1) vtxwgt[1] = 1;
3473   for (i=0; i<numNonExclusivelyOwned; i++) {
3474     vtxwgt[ncon*(i+1)] = 1;
3475     if (ncon>1) vtxwgt[ncon*(i+1)+1] = 0;
3476   }
3477 
3478   if (viewer) {
3479     ierr = PetscViewerASCIIPrintf(viewer, "Attempt rebalancing of shared points of depth %D on interface of mesh distribution.\n", entityDepth);CHKERRQ(ierr);
3480     ierr = PetscViewerASCIIPrintf(viewer, "Size of generated auxiliary graph: %D\n", cumSumVertices[size]);CHKERRQ(ierr);
3481   }
3482   if (parallel) {
3483     ierr = PetscMalloc1(4, &options);CHKERRQ(ierr);
3484     options[0] = 1;
3485     options[1] = 0; /* Verbosity */
3486     options[2] = 0; /* Seed */
3487     options[3] = PARMETIS_PSR_COUPLED; /* Seed */
3488     if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using ParMETIS to partition graph.\n");CHKERRQ(ierr); }
3489     if (useInitialGuess) {
3490       if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using current distribution of points as initial guess.\n");CHKERRQ(ierr); }
3491       PetscStackPush("ParMETIS_V3_RefineKway");
3492       ierr = ParMETIS_V3_RefineKway((PetscInt*)cumSumVertices, xadj, adjncy, vtxwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgecut, part, &comm);
3493       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_RefineKway()");
3494       PetscStackPop;
3495     } else {
3496       PetscStackPush("ParMETIS_V3_PartKway");
3497       ierr = ParMETIS_V3_PartKway((PetscInt*)cumSumVertices, xadj, adjncy, vtxwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgecut, part, &comm);
3498       PetscStackPop;
3499       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
3500     }
3501     ierr = PetscFree(options);CHKERRQ(ierr);
3502   } else {
3503     if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using METIS to partition graph.\n");CHKERRQ(ierr); }
3504     Mat As;
3505     PetscInt numRows;
3506     PetscInt *partGlobal;
3507     ierr = MatCreateRedundantMatrix(A, size, MPI_COMM_NULL, MAT_INITIAL_MATRIX, &As);CHKERRQ(ierr);
3508 
3509     PetscInt *numExclusivelyOwnedAll;
3510     ierr = PetscMalloc1(size, &numExclusivelyOwnedAll);CHKERRQ(ierr);
3511     numExclusivelyOwnedAll[rank] = numExclusivelyOwned;
3512     ierr = MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,numExclusivelyOwnedAll,1,MPIU_INT,comm);CHKERRQ(ierr);
3513 
3514     ierr = MatGetSize(As, &numRows, NULL);CHKERRQ(ierr);
3515     ierr = PetscMalloc1(numRows, &partGlobal);CHKERRQ(ierr);
3516     if (!rank) {
3517       PetscInt *adjncy_g, *xadj_g, *vtxwgt_g;
3518       lenadjncy = 0;
3519 
3520       for (i=0; i<numRows; i++) {
3521         PetscInt temp=0;
3522         ierr = MatGetRow(As, i, &temp, NULL, NULL);CHKERRQ(ierr);
3523         lenadjncy += temp;
3524         ierr = MatRestoreRow(As, i, &temp, NULL, NULL);CHKERRQ(ierr);
3525       }
3526       ierr = PetscMalloc1(lenadjncy, &adjncy_g);CHKERRQ(ierr);
3527       lenxadj = 1 + numRows;
3528       ierr = PetscMalloc1(lenxadj, &xadj_g);CHKERRQ(ierr);
3529       xadj_g[0] = 0;
3530       counter = 0;
3531       for (i=0; i<numRows; i++) {
3532         PetscInt        temp=0;
3533         const PetscInt *cols;
3534         ierr = MatGetRow(As, i, &temp, &cols, NULL);CHKERRQ(ierr);
3535         ierr = PetscArraycpy(&adjncy_g[counter], cols, temp);CHKERRQ(ierr);
3536         counter += temp;
3537         xadj_g[i+1] = counter;
3538         ierr = MatRestoreRow(As, i, &temp, &cols, NULL);CHKERRQ(ierr);
3539       }
3540       ierr = PetscMalloc1(2*numRows, &vtxwgt_g);CHKERRQ(ierr);
3541       for (i=0; i<size; i++){
3542         vtxwgt_g[ncon*cumSumVertices[i]] = numExclusivelyOwnedAll[i];
3543         if (ncon>1) vtxwgt_g[ncon*cumSumVertices[i]+1] = 1;
3544         for (j=cumSumVertices[i]+1; j<cumSumVertices[i+1]; j++) {
3545           vtxwgt_g[ncon*j] = 1;
3546           if (ncon>1) vtxwgt_g[2*j+1] = 0;
3547         }
3548       }
3549       ierr = PetscMalloc1(64, &options);CHKERRQ(ierr);
3550       ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
3551       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
3552       options[METIS_OPTION_CONTIG] = 1;
3553       PetscStackPush("METIS_PartGraphKway");
3554       ierr = METIS_PartGraphKway(&numRows, &ncon, xadj_g, adjncy_g, vtxwgt_g, NULL, NULL, &nparts, tpwgts, ubvec, options, &edgecut, partGlobal);
3555       PetscStackPop;
3556       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
3557       ierr = PetscFree(options);CHKERRQ(ierr);
3558       ierr = PetscFree(xadj_g);CHKERRQ(ierr);
3559       ierr = PetscFree(adjncy_g);CHKERRQ(ierr);
3560       ierr = PetscFree(vtxwgt_g);CHKERRQ(ierr);
3561     }
3562     ierr = PetscFree(numExclusivelyOwnedAll);CHKERRQ(ierr);
3563 
3564     /* Now scatter the parts array. */
3565     {
3566       PetscMPIInt *counts, *mpiCumSumVertices;
3567       ierr = PetscMalloc1(size, &counts);CHKERRQ(ierr);
3568       ierr = PetscMalloc1(size+1, &mpiCumSumVertices);CHKERRQ(ierr);
3569       for(i=0; i<size; i++) {
3570         ierr = PetscMPIIntCast(cumSumVertices[i+1] - cumSumVertices[i], &(counts[i]));CHKERRQ(ierr);
3571       }
3572       for(i=0; i<=size; i++) {
3573         ierr = PetscMPIIntCast(cumSumVertices[i], &(mpiCumSumVertices[i]));CHKERRQ(ierr);
3574       }
3575       ierr = MPI_Scatterv(partGlobal, counts, mpiCumSumVertices, MPIU_INT, part, counts[rank], MPIU_INT, 0, comm);CHKERRQ(ierr);
3576       ierr = PetscFree(counts);CHKERRQ(ierr);
3577       ierr = PetscFree(mpiCumSumVertices);CHKERRQ(ierr);
3578     }
3579 
3580     ierr = PetscFree(partGlobal);CHKERRQ(ierr);
3581     ierr = MatDestroy(&As);CHKERRQ(ierr);
3582   }
3583 
3584   ierr = MatDestroy(&A);CHKERRQ(ierr);
3585   ierr = PetscFree(ubvec);CHKERRQ(ierr);
3586   ierr = PetscFree(tpwgts);CHKERRQ(ierr);
3587 
3588   /* Now rename the result so that the vertex resembling the exclusively owned points stays on the same rank */
3589 
3590   ierr = PetscMalloc1(size, &firstVertices);CHKERRQ(ierr);
3591   ierr = PetscMalloc1(size, &renumbering);CHKERRQ(ierr);
3592   firstVertices[rank] = part[0];
3593   ierr = MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,firstVertices,1,MPIU_INT,comm);CHKERRQ(ierr);
3594   for (i=0; i<size; i++) {
3595     renumbering[firstVertices[i]] = i;
3596   }
3597   for (i=0; i<cumSumVertices[rank+1]-cumSumVertices[rank]; i++) {
3598     part[i] = renumbering[part[i]];
3599   }
3600   /* Check if the renumbering worked (this can fail when ParMETIS gives fewer partitions than there are processes) */
3601   failed = (PetscInt)(part[0] != rank);
3602   ierr = MPI_Allreduce(&failed, &failedGlobal, 1, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
3603 
3604   ierr = PetscFree(firstVertices);CHKERRQ(ierr);
3605   ierr = PetscFree(renumbering);CHKERRQ(ierr);
3606 
3607   if (failedGlobal > 0) {
3608     ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
3609     ierr = PetscFree(xadj);CHKERRQ(ierr);
3610     ierr = PetscFree(adjncy);CHKERRQ(ierr);
3611     ierr = PetscFree(vtxwgt);CHKERRQ(ierr);
3612     ierr = PetscFree(toBalance);CHKERRQ(ierr);
3613     ierr = PetscFree(isLeaf);CHKERRQ(ierr);
3614     ierr = PetscFree(isNonExclusivelyOwned);CHKERRQ(ierr);
3615     ierr = PetscFree(isExclusivelyOwned);CHKERRQ(ierr);
3616     ierr = PetscFree(part);CHKERRQ(ierr);
3617     if (viewer) {
3618       ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
3619       ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
3620     }
3621     ierr = PetscLogEventEnd(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
3622     PetscFunctionReturn(0);
3623   }
3624 
3625   /*Let's check how well we did distributing points*/
3626   if (viewer) {
3627     ierr = PetscViewerASCIIPrintf(viewer, "Comparing number of owned entities of depth %D on each process before rebalancing, after rebalancing, and after consistency checks.\n", entityDepth);CHKERRQ(ierr);
3628     ierr = PetscViewerASCIIPrintf(viewer, "Initial.     ");CHKERRQ(ierr);
3629     ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, NULL, viewer);CHKERRQ(ierr);
3630     ierr = PetscViewerASCIIPrintf(viewer, "Rebalanced.  ");CHKERRQ(ierr);
3631     ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, part, viewer);CHKERRQ(ierr);
3632   }
3633 
3634   /* Now check that every vertex is owned by a process that it is actually connected to. */
3635   for (i=1; i<=numNonExclusivelyOwned; i++) {
3636     PetscInt loc = 0;
3637     ierr = PetscFindInt(cumSumVertices[part[i]], xadj[i+1]-xadj[i], &adjncy[xadj[i]], &loc);CHKERRQ(ierr);
3638     /* If not, then just set the owner to the original owner (hopefully a rare event, it means that a vertex has been isolated) */
3639     if (loc<0) {
3640       part[i] = rank;
3641     }
3642   }
3643 
3644   /* Let's see how significant the influences of the previous fixing up step was.*/
3645   if (viewer) {
3646     ierr = PetscViewerASCIIPrintf(viewer, "After.       ");CHKERRQ(ierr);
3647     ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, part, viewer);CHKERRQ(ierr);
3648   }
3649 
3650   ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
3651   ierr = PetscFree(xadj);CHKERRQ(ierr);
3652   ierr = PetscFree(adjncy);CHKERRQ(ierr);
3653   ierr = PetscFree(vtxwgt);CHKERRQ(ierr);
3654 
3655   /* Almost done, now rewrite the SF to reflect the new ownership. */
3656   {
3657     PetscInt *pointsToRewrite;
3658     ierr = PetscMalloc1(numNonExclusivelyOwned, &pointsToRewrite);CHKERRQ(ierr);
3659     counter = 0;
3660     for(i=0; i<pEnd-pStart; i++) {
3661       if (toBalance[i]) {
3662         if (isNonExclusivelyOwned[i]) {
3663           pointsToRewrite[counter] = i + pStart;
3664           counter++;
3665         }
3666       }
3667     }
3668     ierr = DMPlexRewriteSF(dm, numNonExclusivelyOwned, pointsToRewrite, part+1, degrees);CHKERRQ(ierr);
3669     ierr = PetscFree(pointsToRewrite);CHKERRQ(ierr);
3670   }
3671 
3672   ierr = PetscFree(toBalance);CHKERRQ(ierr);
3673   ierr = PetscFree(isLeaf);CHKERRQ(ierr);
3674   ierr = PetscFree(isNonExclusivelyOwned);CHKERRQ(ierr);
3675   ierr = PetscFree(isExclusivelyOwned);CHKERRQ(ierr);
3676   ierr = PetscFree(part);CHKERRQ(ierr);
3677   if (viewer) {
3678     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
3679     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
3680   }
3681   if (success) *success = PETSC_TRUE;
3682   ierr = PetscLogEventEnd(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
3683 #else
3684   SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
3685 #endif
3686   PetscFunctionReturn(0);
3687 }
3688