xref: /petsc/src/dm/impls/plex/plexpartition.c (revision edf60d87941db85ca894b66f7bafcd0614990de4)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/hashseti.h>
3 
/* Class id for PetscPartitioner objects; assigned when the class is registered */
PetscClassId PETSCPARTITIONER_CLASSID = 0;

/* Registry mapping partitioner type names to their constructors, consulted by
   PetscPartitionerSetType(); the flag records whether the built-in types have
   been registered yet */
PetscFunctionList PetscPartitionerList              = NULL;
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;

/* NOTE(review): presumably flipped to PETSC_TRUE once the Chaco citation below
   has been registered (see PetscCitationsRegister()) -- confirm with callers */
PetscBool ChacoPartitionercite = PETSC_FALSE;
/* BibTeX entry for the Chaco partitioner reference. Each concatenated fragment
   must end in "\n" so that the emitted BibTeX entry is one field per line; the
   booktitle line was previously missing its trailing newline. */
const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
                               "  author    = {Bruce Hendrickson and Robert Leland},\n"
                               "  title     = {A multilevel algorithm for partitioning graphs},\n"
                               "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},\n"
                               "  isbn      = {0-89791-816-9},\n"
                               "  pages     = {28},\n"
                               "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
                               "  publisher = {ACM Press},\n"
                               "  address   = {New York},\n"
                               "  year      = {1995}\n}\n";
20 
/* BibTeX entry for the ParMetis reference. NOTE(review): the flag presumably
   records whether the citation has been registered (see PetscCitationsRegister())
   -- confirm with callers */
PetscBool ParMetisPartitionercite = PETSC_FALSE;
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";
29 
/*@C
  DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner

  Input Parameters:
+ dm      - The mesh DM
- height  - Height of the strata from which to construct the graph

  Output Parameters:
+ numVertices     - Number of vertices in the graph
. offsets         - Point offsets in the graph
. adjacency       - Point connectivity in the graph
- globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.

  Notes:
  The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: developer

.seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMSetAdjacency()
@*/
PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
{
  PetscInt       dim, depth, p, pStart, pEnd, a, adjSize, idx, size;
  PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
  IS             cellNumbering;
  const PetscInt *cellNum;
  PetscBool      useCone, useClosure;
  PetscSection   section;
  PetscSegBuffer adjBuffer;
  PetscSF        sfPoint;
  PetscInt       *adjCells = NULL, *remoteCells = NULL;
  const PetscInt *local;
  PetscInt       nroots, nleaves, l;
  PetscMPIInt    rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  if (dim != depth) {
    /* We do not handle the uninterpolated case here: fall back to the
       meet-based neighbor computation */
    ierr = DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);CHKERRQ(ierr);
    /* DMPlexCreateNeighborCSR does not make a numbering */
    if (globalNumbering) {ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);CHKERRQ(ierr);}
    /* Different behavior for empty graphs: the caller still expects a
       one-entry offsets array */
    if (!*numVertices) {
      ierr = PetscMalloc1(1, offsets);CHKERRQ(ierr);
      (*offsets)[0] = 0;
    }
    /* Broken in parallel */
    if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported");
    PetscFunctionReturn(0);
  }
  ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
  /* Build adjacency graph via a section/segbuffer: the section counts
     neighbors per cell, the segbuffer accumulates the adjacency lists */
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
  /* Always use FVM adjacency to create partitioner graph; the user's
     adjacency settings are saved and restored below */
  ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
  ierr = DMSetBasicAdjacency(dm, PETSC_TRUE, PETSC_FALSE);CHKERRQ(ierr);
  ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);CHKERRQ(ierr);
  if (globalNumbering) {
    /* Hand the caller its own reference; ours is dropped at the end */
    ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
    *globalNumbering = cellNumbering;
  }
  ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  /* For all boundary faces (including faces adjacent to a ghost cell), record the local cell in adjCells
     Broadcast adjCells to remoteCells (to get cells from roots) and Reduce adjCells to remoteCells (to get cells from leaves)
   */
  ierr = PetscSFGetGraph(dm->sf, &nroots, &nleaves, &local, NULL);CHKERRQ(ierr);
  if (nroots >= 0) {
    PetscInt fStart, fEnd, f;

    /* adjCells[f] holds the global number of the cell on "our" side of
       shared face f; -3 marks faces with no recorded cell */
    ierr = PetscCalloc2(nroots, &adjCells, nroots, &remoteCells);CHKERRQ(ierr);
    ierr = DMPlexGetHeightStratum(dm, 1, &fStart, &fEnd);CHKERRQ(ierr);
    for (l = 0; l < nroots; ++l) adjCells[l] = -3;
    for (f = fStart; f < fEnd; ++f) {
      const PetscInt *support;
      PetscInt        supportSize;

      ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
      ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
      if (supportSize == 1) adjCells[f] = cellNum[support[0]];
      else if (supportSize == 2) {
        /* Interior face whose neighbor is a leaf (ghost) cell: record the
           owned cell on the other side */
        ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = cellNum[support[1]];
        ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = cellNum[support[0]];
      }
    }
    for (l = 0; l < nroots; ++l) remoteCells[l] = -1;
    ierr = PetscSFBcastBegin(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
  }
  /* Combine local and global adjacencies */
  for (*numVertices = 0, p = pStart; p < pEnd; p++) {
    const PetscInt *cone;
    PetscInt        coneSize, c;

    /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    /* Add remote cells */
    if (remoteCells) {
      ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
      ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
      for (c = 0; c < coneSize; ++c) {
        if (remoteCells[cone[c]] != -1) {
          PetscInt *PETSC_RESTRICT pBuf;

          ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
          ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
          *pBuf = remoteCells[cone[c]];
        }
      }
    }
    /* Add local cells */
    adjSize = PETSC_DETERMINE;
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      const PetscInt point = adj[a];
      if (point != p && pStart <= point && point < pEnd) {
        PetscInt *PETSC_RESTRICT pBuf;
        ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
        ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
        *pBuf = cellNum[point];
      }
    }
    (*numVertices)++;
  }
  ierr = PetscFree2(adjCells, remoteCells);CHKERRQ(ierr);
  /* Restore the user's adjacency settings */
  ierr = DMSetBasicAdjacency(dm, useCone, useClosure);CHKERRQ(ierr);
  /* Derive CSR graph from section/segbuffer */
  ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
  for (idx = 0, p = pStart; p < pEnd; p++) {
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
  }
  vOffsets[*numVertices] = size;
  /* NOTE(review): if offsets/adjacency are NULL the corresponding arrays
     (vOffsets/graph) are leaked -- confirm callers always pass both */
  if (offsets) *offsets = vOffsets;
  ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
  ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
  if (adjacency) *adjacency = graph;
  /* Clean up */
  ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
  ierr = PetscFree(adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
187 
188 /*@C
189   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
190 
191   Collective
192 
193   Input Arguments:
194 + dm - The DMPlex
195 - cellHeight - The height of mesh points to treat as cells (default should be 0)
196 
197   Output Arguments:
198 + numVertices - The number of local vertices in the graph, or cells in the mesh.
199 . offsets     - The offset to the adjacency list for each cell
200 - adjacency   - The adjacency list for all cells
201 
202   Note: This is suitable for input to a mesh partitioner like ParMetis.
203 
204   Level: advanced
205 
206 .seealso: DMPlexCreate()
207 @*/
208 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
209 {
210   const PetscInt maxFaceCases = 30;
211   PetscInt       numFaceCases = 0;
212   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
213   PetscInt      *off, *adj;
214   PetscInt      *neighborCells = NULL;
215   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
216   PetscErrorCode ierr;
217 
218   PetscFunctionBegin;
219   /* For parallel partitioning, I think you have to communicate supports */
220   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
221   cellDim = dim - cellHeight;
222   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
223   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
224   if (cEnd - cStart == 0) {
225     if (numVertices) *numVertices = 0;
226     if (offsets)   *offsets   = NULL;
227     if (adjacency) *adjacency = NULL;
228     PetscFunctionReturn(0);
229   }
230   numCells  = cEnd - cStart;
231   faceDepth = depth - cellHeight;
232   if (dim == depth) {
233     PetscInt f, fStart, fEnd;
234 
235     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
236     /* Count neighboring cells */
237     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
238     for (f = fStart; f < fEnd; ++f) {
239       const PetscInt *support;
240       PetscInt        supportSize;
241       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
242       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
243       if (supportSize == 2) {
244         PetscInt numChildren;
245 
246         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
247         if (!numChildren) {
248           ++off[support[0]-cStart+1];
249           ++off[support[1]-cStart+1];
250         }
251       }
252     }
253     /* Prefix sum */
254     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
255     if (adjacency) {
256       PetscInt *tmp;
257 
258       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
259       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
260       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
261       /* Get neighboring cells */
262       for (f = fStart; f < fEnd; ++f) {
263         const PetscInt *support;
264         PetscInt        supportSize;
265         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
266         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
267         if (supportSize == 2) {
268           PetscInt numChildren;
269 
270           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
271           if (!numChildren) {
272             adj[tmp[support[0]-cStart]++] = support[1];
273             adj[tmp[support[1]-cStart]++] = support[0];
274           }
275         }
276       }
277 #if defined(PETSC_USE_DEBUG)
278       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
279 #endif
280       ierr = PetscFree(tmp);CHKERRQ(ierr);
281     }
282     if (numVertices) *numVertices = numCells;
283     if (offsets)   *offsets   = off;
284     if (adjacency) *adjacency = adj;
285     PetscFunctionReturn(0);
286   }
287   /* Setup face recognition */
288   if (faceDepth == 1) {
289     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
290 
291     for (c = cStart; c < cEnd; ++c) {
292       PetscInt corners;
293 
294       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
295       if (!cornersSeen[corners]) {
296         PetscInt nFV;
297 
298         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
299         cornersSeen[corners] = 1;
300 
301         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
302 
303         numFaceVertices[numFaceCases++] = nFV;
304       }
305     }
306   }
307   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
308   /* Count neighboring cells */
309   for (cell = cStart; cell < cEnd; ++cell) {
310     PetscInt numNeighbors = PETSC_DETERMINE, n;
311 
312     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
313     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
314     for (n = 0; n < numNeighbors; ++n) {
315       PetscInt        cellPair[2];
316       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
317       PetscInt        meetSize = 0;
318       const PetscInt *meet    = NULL;
319 
320       cellPair[0] = cell; cellPair[1] = neighborCells[n];
321       if (cellPair[0] == cellPair[1]) continue;
322       if (!found) {
323         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
324         if (meetSize) {
325           PetscInt f;
326 
327           for (f = 0; f < numFaceCases; ++f) {
328             if (numFaceVertices[f] == meetSize) {
329               found = PETSC_TRUE;
330               break;
331             }
332           }
333         }
334         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
335       }
336       if (found) ++off[cell-cStart+1];
337     }
338   }
339   /* Prefix sum */
340   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
341 
342   if (adjacency) {
343     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
344     /* Get neighboring cells */
345     for (cell = cStart; cell < cEnd; ++cell) {
346       PetscInt numNeighbors = PETSC_DETERMINE, n;
347       PetscInt cellOffset   = 0;
348 
349       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
350       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
351       for (n = 0; n < numNeighbors; ++n) {
352         PetscInt        cellPair[2];
353         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
354         PetscInt        meetSize = 0;
355         const PetscInt *meet    = NULL;
356 
357         cellPair[0] = cell; cellPair[1] = neighborCells[n];
358         if (cellPair[0] == cellPair[1]) continue;
359         if (!found) {
360           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
361           if (meetSize) {
362             PetscInt f;
363 
364             for (f = 0; f < numFaceCases; ++f) {
365               if (numFaceVertices[f] == meetSize) {
366                 found = PETSC_TRUE;
367                 break;
368               }
369             }
370           }
371           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
372         }
373         if (found) {
374           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
375           ++cellOffset;
376         }
377       }
378     }
379   }
380   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
381   if (numVertices) *numVertices = numCells;
382   if (offsets)   *offsets   = off;
383   if (adjacency) *adjacency = adj;
384   PetscFunctionReturn(0);
385 }
386 
/*@C
  PetscPartitionerRegister - Adds a new PetscPartitioner implementation

  Not Collective

  Input Parameters:
+ name        - The name of a new user-defined creation routine
- create_func - The creation routine itself

  Notes:
  PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners

  Sample usage:
.vb
    PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
.ve

  Then, your PetscPartitioner type can be chosen with the procedural interface via
.vb
    PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
    PetscPartitionerSetType(PetscPartitioner, "my_part");
.ve
   or at runtime via the option
.vb
    -petscpartitioner_type my_part
.ve

  Level: advanced

.keywords: PetscPartitioner, register
.seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()

@*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Append (name, constructor) to the global registry consulted by PetscPartitionerSetType() */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
428 
429 /*@C
430   PetscPartitionerSetType - Builds a particular PetscPartitioner
431 
432   Collective on PetscPartitioner
433 
434   Input Parameters:
435 + part - The PetscPartitioner object
436 - name - The kind of partitioner
437 
438   Options Database Key:
439 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
440 
441   Level: intermediate
442 
443 .keywords: PetscPartitioner, set, type
444 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
445 @*/
446 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
447 {
448   PetscErrorCode (*r)(PetscPartitioner);
449   PetscBool      match;
450   PetscErrorCode ierr;
451 
452   PetscFunctionBegin;
453   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
454   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
455   if (match) PetscFunctionReturn(0);
456 
457   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
458   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
459   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
460 
461   if (part->ops->destroy) {
462     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
463   }
464   ierr = PetscMemzero(part->ops, sizeof(struct _PetscPartitionerOps));CHKERRQ(ierr);
465   ierr = (*r)(part);CHKERRQ(ierr);
466   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
467   PetscFunctionReturn(0);
468 }
469 
/*@C
  PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.

  Not Collective

  Input Parameter:
. part - The PetscPartitioner

  Output Parameter:
. name - The PetscPartitioner type name

  Level: intermediate

.keywords: PetscPartitioner, get, type, name
.seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  /* Ensure built-in types are registered so the returned name is meaningful */
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  /* Returns a borrowed reference to the type string; the caller must not free it */
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
497 
498 /*@C
499   PetscPartitionerView - Views a PetscPartitioner
500 
501   Collective on PetscPartitioner
502 
503   Input Parameter:
504 + part - the PetscPartitioner object to view
505 - v    - the viewer
506 
507   Level: developer
508 
509 .seealso: PetscPartitionerDestroy()
510 @*/
511 PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
512 {
513   PetscMPIInt    size;
514   PetscBool      isascii;
515   PetscErrorCode ierr;
516 
517   PetscFunctionBegin;
518   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
519   if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
520   ierr = PetscObjectTypeCompare((PetscObject) v, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
521   if (isascii) {
522     ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
523     ierr = PetscViewerASCIIPrintf(v, "Graph Partitioner: %d MPI Process%s\n", size, size > 1 ? "es" : "");CHKERRQ(ierr);
524     ierr = PetscViewerASCIIPrintf(v, "  type: %s\n", part->hdr.type_name);CHKERRQ(ierr);
525     ierr = PetscViewerASCIIPushTab(v);CHKERRQ(ierr);
526     ierr = PetscViewerASCIIPrintf(v, "edge cut: %D\n", part->edgeCut);CHKERRQ(ierr);
527     ierr = PetscViewerASCIIPrintf(v, "balance:  %.2g\n", part->balance);CHKERRQ(ierr);
528     ierr = PetscViewerASCIIPopTab(v);CHKERRQ(ierr);
529   }
530   if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
531   PetscFunctionReturn(0);
532 }
533 
534 static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType)
535 {
536   PetscFunctionBegin;
537   if (!currentType) {
538 #if defined(PETSC_HAVE_CHACO)
539     *defaultType = PETSCPARTITIONERCHACO;
540 #elif defined(PETSC_HAVE_PARMETIS)
541     *defaultType = PETSCPARTITIONERPARMETIS;
542 #elif defined(PETSC_HAVE_PTSCOTCH)
543     *defaultType = PETSCPARTITIONERPTSCOTCH;
544 #else
545     *defaultType = PETSCPARTITIONERSIMPLE;
546 #endif
547   } else {
548     *defaultType = currentType;
549   }
550   PetscFunctionReturn(0);
551 }
552 
/*@
  PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to set options for

  Level: developer

.seealso: PetscPartitionerView()
@*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  const char    *defaultType;
  char           name[256];
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  ierr = PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);CHKERRQ(ierr);
  /* PetscObjectOptionsBegin apparently declares PetscOptionsObject, used below
     -- it must stay paired with PetscOptionsEnd() */
  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, sizeof(name), &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
  } else if (!((PetscObject) part)->type_name) {
    /* No type yet and none requested: install the default so the object is usable */
    ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
  }
  /* Give the implementation a chance to process its own options */
  if (part->ops->setfromoptions) {
    ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);
  }
  /* Record the requested graph viewer; consumed later by PetscPartitionerPartition() */
  ierr = PetscOptionsGetViewer(((PetscObject) part)->comm, ((PetscObject) part)->options, ((PetscObject) part)->prefix, "-petscpartitioner_view_graph", &part->viewerGraph, &part->formatGraph, &part->viewGraph);CHKERRQ(ierr);
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
592 
/*@C
  PetscPartitionerSetUp - Construct data structures for the PetscPartitioner

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to setup

  Level: developer

.seealso: PetscPartitionerView(), PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Delegate to the implementation; a no-op for types without a setup hook */
  if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
614 
615 /*@
616   PetscPartitionerDestroy - Destroys a PetscPartitioner object
617 
618   Collective on PetscPartitioner
619 
620   Input Parameter:
621 . part - the PetscPartitioner object to destroy
622 
623   Level: developer
624 
625 .seealso: PetscPartitionerView()
626 @*/
627 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
628 {
629   PetscErrorCode ierr;
630 
631   PetscFunctionBegin;
632   if (!*part) PetscFunctionReturn(0);
633   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
634 
635   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
636   ((PetscObject) (*part))->refct = 0;
637 
638   ierr = PetscViewerDestroy(&(*part)->viewerGraph);CHKERRQ(ierr);
639   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
640   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
641   PetscFunctionReturn(0);
642 }
643 
644 /*@
645   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
646 
647   Collective on MPI_Comm
648 
649   Input Parameter:
650 . comm - The communicator for the PetscPartitioner object
651 
652   Output Parameter:
653 . part - The PetscPartitioner object
654 
655   Level: beginner
656 
657 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
658 @*/
659 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
660 {
661   PetscPartitioner p;
662   const char       *partitionerType = NULL;
663   PetscErrorCode   ierr;
664 
665   PetscFunctionBegin;
666   PetscValidPointer(part, 2);
667   *part = NULL;
668   ierr = DMInitializePackage();CHKERRQ(ierr);
669 
670   ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
671   ierr = PetscPartitionerGetDefaultType(NULL,&partitionerType);CHKERRQ(ierr);
672   ierr = PetscPartitionerSetType(p,partitionerType);CHKERRQ(ierr);
673 
674   p->edgeCut = 0;
675   p->balance = 0.0;
676 
677   *part = p;
678   PetscFunctionReturn(0);
679 }
680 
681 /*@
682   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
683 
684   Collective on DM
685 
686   Input Parameters:
687 + part    - The PetscPartitioner
688 - dm      - The mesh DM
689 
690   Output Parameters:
691 + partSection     - The PetscSection giving the division of points by partition
692 - partition       - The list of points by partition
693 
694   Options Database:
695 . -petscpartitioner_view_graph - View the graph we are partitioning
696 
697   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
698 
699   Level: developer
700 
701 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
702 @*/
703 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
704 {
705   PetscMPIInt    size;
706   PetscErrorCode ierr;
707 
708   PetscFunctionBegin;
709   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
710   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
711   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
712   PetscValidPointer(partition, 5);
713   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
714   if (size == 1) {
715     PetscInt *points;
716     PetscInt  cStart, cEnd, c;
717 
718     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
719     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
720     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
721     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
722     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
723     for (c = cStart; c < cEnd; ++c) points[c] = c;
724     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
725     PetscFunctionReturn(0);
726   }
727   if (part->height == 0) {
728     PetscInt numVertices;
729     PetscInt *start     = NULL;
730     PetscInt *adjacency = NULL;
731     IS       globalNumbering;
732 
733     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
734     if (part->viewGraph) {
735       PetscViewer viewer = part->viewerGraph;
736       PetscBool   isascii;
737       PetscInt    v, i;
738       PetscMPIInt rank;
739 
740       ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) viewer), &rank);CHKERRQ(ierr);
741       ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
742       if (isascii) {
743         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
744         ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]Nv: %D\n", rank, numVertices);CHKERRQ(ierr);
745         for (v = 0; v < numVertices; ++v) {
746           const PetscInt s = start[v];
747           const PetscInt e = start[v+1];
748 
749           ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]  ", rank);CHKERRQ(ierr);
750           for (i = s; i < e; ++i) {ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%D ", adjacency[i]);CHKERRQ(ierr);}
751           ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%D-%D)\n", s, e);CHKERRQ(ierr);
752         }
753         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
754         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
755       }
756     }
757     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
758     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
759     ierr = PetscFree(start);CHKERRQ(ierr);
760     ierr = PetscFree(adjacency);CHKERRQ(ierr);
761     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
762       const PetscInt *globalNum;
763       const PetscInt *partIdx;
764       PetscInt *map, cStart, cEnd;
765       PetscInt *adjusted, i, localSize, offset;
766       IS    newPartition;
767 
768       ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
769       ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
770       ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
771       ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
772       ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
773       ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
774       for (i = cStart, offset = 0; i < cEnd; i++) {
775         if (globalNum[i - cStart] >= 0) map[offset++] = i;
776       }
777       for (i = 0; i < localSize; i++) {
778         adjusted[i] = map[partIdx[i]];
779       }
780       ierr = PetscFree(map);CHKERRQ(ierr);
781       ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
782       ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
783       ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
784       ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
785       ierr = ISDestroy(partition);CHKERRQ(ierr);
786       *partition = newPartition;
787     }
788   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
789   ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
790   PetscFunctionReturn(0);
791 }
792 
793 static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
794 {
795   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
796   PetscErrorCode          ierr;
797 
798   PetscFunctionBegin;
799   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
800   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
801   ierr = PetscFree(p);CHKERRQ(ierr);
802   PetscFunctionReturn(0);
803 }
804 
805 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
806 {
807   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
808   PetscErrorCode          ierr;
809 
810   PetscFunctionBegin;
811   if (p->random) {
812     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
813     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
814     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
815   }
816   PetscFunctionReturn(0);
817 }
818 
819 static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
820 {
821   PetscBool      iascii;
822   PetscErrorCode ierr;
823 
824   PetscFunctionBegin;
825   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
826   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
827   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
828   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
829   PetscFunctionReturn(0);
830 }
831 
832 static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
833 {
834   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
835   PetscErrorCode          ierr;
836 
837   PetscFunctionBegin;
838   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner Shell Options");CHKERRQ(ierr);
839   ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
840   ierr = PetscOptionsTail();CHKERRQ(ierr);
841   PetscFunctionReturn(0);
842 }
843 
844 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
845 {
846   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
847   PetscInt                np;
848   PetscErrorCode          ierr;
849 
850   PetscFunctionBegin;
851   if (p->random) {
852     PetscRandom r;
853     PetscInt   *sizes, *points, v, p;
854     PetscMPIInt rank;
855 
856     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
857     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
858     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr);
859     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
860     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
861     for (v = 0; v < numVertices; ++v) {points[v] = v;}
862     for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);}
863     for (v = numVertices-1; v > 0; --v) {
864       PetscReal val;
865       PetscInt  w, tmp;
866 
867       ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));CHKERRQ(ierr);
868       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
869       w    = PetscFloorReal(val);
870       tmp       = points[v];
871       points[v] = points[w];
872       points[w] = tmp;
873     }
874     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
875     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
876     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
877   }
878   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
879   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
880   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
881   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
882   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
883   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
884   *partition = p->partition;
885   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
886   PetscFunctionReturn(0);
887 }
888 
889 static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
890 {
891   PetscFunctionBegin;
892   part->ops->view           = PetscPartitionerView_Shell;
893   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
894   part->ops->destroy        = PetscPartitionerDestroy_Shell;
895   part->ops->partition      = PetscPartitionerPartition_Shell;
896   PetscFunctionReturn(0);
897 }
898 
899 /*MC
900   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
901 
902   Level: intermediate
903 
904 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
905 M*/
906 
907 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
908 {
909   PetscPartitioner_Shell *p;
910   PetscErrorCode          ierr;
911 
912   PetscFunctionBegin;
913   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
914   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
915   part->data = p;
916 
917   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
918   p->random = PETSC_FALSE;
919   PetscFunctionReturn(0);
920 }
921 
922 /*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
924 
925   Collective on Part
926 
927   Input Parameters:
928 + part     - The PetscPartitioner
929 . size - The number of partitions
930 . sizes    - array of size size (or NULL) providing the number of points in each partition
931 - points   - array of size sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)
932 
933   Level: developer
934 
935   Notes:
936 
937     It is safe to free the sizes and points arrays after use in this routine.
938 
939 .seealso DMPlexDistribute(), PetscPartitionerCreate()
940 @*/
941 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
942 {
943   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
944   PetscInt                proc, numPoints;
945   PetscErrorCode          ierr;
946 
947   PetscFunctionBegin;
948   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
949   if (sizes)  {PetscValidPointer(sizes, 3);}
950   if (points) {PetscValidPointer(points, 4);}
951   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
952   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
953   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
954   ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
955   if (sizes) {
956     for (proc = 0; proc < size; ++proc) {
957       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
958     }
959   }
960   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
961   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
962   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
963   PetscFunctionReturn(0);
964 }
965 
966 /*@
967   PetscPartitionerShellSetRandom - Set the flag to use a random partition
968 
969   Collective on Part
970 
971   Input Parameters:
972 + part   - The PetscPartitioner
973 - random - The flag to use a random partition
974 
975   Level: intermediate
976 
977 .seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
978 @*/
979 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
980 {
981   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
982 
983   PetscFunctionBegin;
984   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
985   p->random = random;
986   PetscFunctionReturn(0);
987 }
988 
989 /*@
990   PetscPartitionerShellGetRandom - get the flag to use a random partition
991 
992   Collective on Part
993 
994   Input Parameter:
995 . part   - The PetscPartitioner
996 
  Output Parameter:
998 . random - The flag to use a random partition
999 
1000   Level: intermediate
1001 
1002 .seealso PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
1003 @*/
1004 PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
1005 {
1006   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1007 
1008   PetscFunctionBegin;
1009   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1010   PetscValidPointer(random, 2);
1011   *random = p->random;
1012   PetscFunctionReturn(0);
1013 }
1014 
1015 static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
1016 {
1017   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
1018   PetscErrorCode          ierr;
1019 
1020   PetscFunctionBegin;
1021   ierr = PetscFree(p);CHKERRQ(ierr);
1022   PetscFunctionReturn(0);
1023 }
1024 
/* ASCII view for the simple partitioner: intentionally empty, it has no parameters to report */
static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1030 
1031 static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
1032 {
1033   PetscBool      iascii;
1034   PetscErrorCode ierr;
1035 
1036   PetscFunctionBegin;
1037   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1038   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1039   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1040   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
1041   PetscFunctionReturn(0);
1042 }
1043 
/* Partition by contiguous chunks: the global vertex numbering [0, nvGlobal) is cut
   into nparts nearly equal intervals (the first nvGlobal % nparts intervals get one
   extra vertex), and each vertex is assigned to the interval containing it.  Each
   process owns a contiguous global range [myFirst, myLast], so it only records the
   sizes of the partitions that overlap its own range. */
static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)part);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  /* The local points keep their original order */
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial: the first numVertices % nparts parts get one extra vertex */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  } else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Prefix sums of the local vertex counts give the global ownership ranges */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    /* This process owns the contiguous global vertex range [myFirst, myLast] */
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      PetscInt rem = nvGlobal % nparts;  /* number of partitions with one extra vertex */
      PetscInt pSmall = nvGlobal/nparts; /* size of the small partitions */
      PetscInt pBig = nvGlobal/nparts + 1; /* size of the large partitions */


      if (rem) {
        /* The rem large partitions come first in the global ordering */
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        } else {
          /* myFirst lies past the large partitions; index into the small ones */
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        } else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      } else {
        /* All partitions have the same size */
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      /* Record how many of my vertices fall in each overlapping partition interval */
      for (np = firstPart; np <= lastPart; np++) {
        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1113 
1114 static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1115 {
1116   PetscFunctionBegin;
1117   part->ops->view      = PetscPartitionerView_Simple;
1118   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1119   part->ops->partition = PetscPartitionerPartition_Simple;
1120   PetscFunctionReturn(0);
1121 }
1122 
1123 /*MC
1124   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1125 
1126   Level: intermediate
1127 
1128 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1129 M*/
1130 
1131 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1132 {
1133   PetscPartitioner_Simple *p;
1134   PetscErrorCode           ierr;
1135 
1136   PetscFunctionBegin;
1137   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1138   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1139   part->data = p;
1140 
1141   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1142   PetscFunctionReturn(0);
1143 }
1144 
1145 static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1146 {
1147   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1148   PetscErrorCode          ierr;
1149 
1150   PetscFunctionBegin;
1151   ierr = PetscFree(p);CHKERRQ(ierr);
1152   PetscFunctionReturn(0);
1153 }
1154 
/* ASCII view for the gather partitioner: intentionally empty, it has no parameters to report */
static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1160 
1161 static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1162 {
1163   PetscBool      iascii;
1164   PetscErrorCode ierr;
1165 
1166   PetscFunctionBegin;
1167   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1168   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1169   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1170   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1171   PetscFunctionReturn(0);
1172 }
1173 
1174 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1175 {
1176   PetscInt       np;
1177   PetscErrorCode ierr;
1178 
1179   PetscFunctionBegin;
1180   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1181   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1182   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1183   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1184   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1185   PetscFunctionReturn(0);
1186 }
1187 
1188 static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1189 {
1190   PetscFunctionBegin;
1191   part->ops->view      = PetscPartitionerView_Gather;
1192   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1193   part->ops->partition = PetscPartitionerPartition_Gather;
1194   PetscFunctionReturn(0);
1195 }
1196 
1197 /*MC
1198   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1199 
1200   Level: intermediate
1201 
1202 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1203 M*/
1204 
1205 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1206 {
1207   PetscPartitioner_Gather *p;
1208   PetscErrorCode           ierr;
1209 
1210   PetscFunctionBegin;
1211   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1212   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1213   part->data = p;
1214 
1215   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1216   PetscFunctionReturn(0);
1217 }
1218 
1219 
1220 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1221 {
1222   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1223   PetscErrorCode          ierr;
1224 
1225   PetscFunctionBegin;
1226   ierr = PetscFree(p);CHKERRQ(ierr);
1227   PetscFunctionReturn(0);
1228 }
1229 
/* ASCII view for the Chaco partitioner: intentionally empty, it has no parameters to report */
static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1235 
1236 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1237 {
1238   PetscBool      iascii;
1239   PetscErrorCode ierr;
1240 
1241   PetscFunctionBegin;
1242   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1243   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1244   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1245   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1246   PetscFunctionReturn(0);
1247 }
1248 
1249 #if defined(PETSC_HAVE_CHACO)
1250 #if defined(PETSC_HAVE_UNISTD_H)
1251 #include <unistd.h>
1252 #endif
1253 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1254 #include <chaco.h>
1255 #else
1256 /* Older versions of Chaco do not have an include file */
1257 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1258                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1259                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1260                        int mesh_dims[3], double *goal, int global_method, int local_method,
1261                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1262 #endif
1263 extern int FREE_GRAPH;
1264 #endif
1265 
/* Partition the (serial) graph with the Chaco library.  Chaco expects 1-based
   vertex numbering and prints its diagnostics to stdout, so the adjacency list
   is temporarily shifted by one in place and stdout is redirected to a pipe for
   the duration of the library call. */
static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int           *assignment;              /* Output partition */
#else
  short int     *assignment;              /* Output partition */
#endif
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
#if defined (PETSC_USE_DEBUG)
  {
    /* Chaco is serial: verify that at most one rank holds vertices */
    int ival,isum;
    PetscBool distributed;

    ival = (numVertices > 0);
    ierr = MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);CHKERRQ(ierr);
    distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
    if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
  }
#endif
  if (!numVertices) {
    /* Nothing to partition locally: produce an empty section and IS */
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco expects 1-based vertex numbers; shift the caller's adjacency in place (undone at the end) */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    /* Restore stdout and capture Chaco's output so it can be reported on failure */
    char msgLog[10000];
    int  count;

    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#else
  if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* List the vertices of partition 0 first, then partition 1, etc. */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift applied to the caller's adjacency array above */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1386 
1387 static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1388 {
1389   PetscFunctionBegin;
1390   part->ops->view      = PetscPartitionerView_Chaco;
1391   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1392   part->ops->partition = PetscPartitionerPartition_Chaco;
1393   PetscFunctionReturn(0);
1394 }
1395 
1396 /*MC
1397   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1398 
1399   Level: intermediate
1400 
1401 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1402 M*/
1403 
1404 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1405 {
1406   PetscPartitioner_Chaco *p;
1407   PetscErrorCode          ierr;
1408 
1409   PetscFunctionBegin;
1410   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1411   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1412   part->data = p;
1413 
1414   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1415   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1416   PetscFunctionReturn(0);
1417 }
1418 
/* Option names for the ParMETIS partitioning schemes: multilevel k-way and recursive bisection */
static const char *ptypes[] = {"kway", "rb"};
1420 
1421 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1422 {
1423   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1424   PetscErrorCode             ierr;
1425 
1426   PetscFunctionBegin;
1427   ierr = PetscFree(p);CHKERRQ(ierr);
1428   PetscFunctionReturn(0);
1429 }
1430 
1431 static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1432 {
1433   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1434   PetscErrorCode             ierr;
1435 
1436   PetscFunctionBegin;
1437   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1438   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis type: %s\n", ptypes[p->ptype]);CHKERRQ(ierr);
1439   ierr = PetscViewerASCIIPrintf(viewer, "load imbalance ratio %g\n", (double) p->imbalanceRatio);CHKERRQ(ierr);
1440   ierr = PetscViewerASCIIPrintf(viewer, "debug flag %D\n", p->debugFlag);CHKERRQ(ierr);
1441   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1442   PetscFunctionReturn(0);
1443 }
1444 
1445 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1446 {
1447   PetscBool      iascii;
1448   PetscErrorCode ierr;
1449 
1450   PetscFunctionBegin;
1451   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1452   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1453   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1454   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1455   PetscFunctionReturn(0);
1456 }
1457 
1458 static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1459 {
1460   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1461   PetscErrorCode            ierr;
1462 
1463   PetscFunctionBegin;
1464   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");CHKERRQ(ierr);
1465   ierr = PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);CHKERRQ(ierr);
1466   ierr = PetscOptionsReal("-petscpartitioner_parmetis_imbalance_ratio", "Load imbalance ratio limit", "", p->imbalanceRatio, &p->imbalanceRatio, NULL);CHKERRQ(ierr);
1467   ierr = PetscOptionsInt("-petscpartitioner_parmetis_debug", "Debugging flag", "", p->debugFlag, &p->debugFlag, NULL);CHKERRQ(ierr);
1468   ierr = PetscOptionsTail();CHKERRQ(ierr);
1469   PetscFunctionReturn(0);
1470 }
1471 
1472 #if defined(PETSC_HAVE_PARMETIS)
1473 #include <parmetis.h>
1474 #endif
1475 
1476 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1477 {
1478 #if defined(PETSC_HAVE_PARMETIS)
1479   PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *) part->data;
1480   MPI_Comm       comm;
1481   PetscSection   section;
1482   PetscInt       nvtxs       = numVertices; /* The number of vertices in full graph */
1483   PetscInt      *vtxdist;                   /* Distribution of vertices across processes */
1484   PetscInt      *xadj        = start;       /* Start of edge list for each vertex */
1485   PetscInt      *adjncy      = adjacency;   /* Edge lists for all vertices */
1486   PetscInt      *vwgt        = NULL;        /* Vertex weights */
1487   PetscInt      *adjwgt      = NULL;        /* Edge weights */
1488   PetscInt       wgtflag     = 0;           /* Indicates which weights are present */
1489   PetscInt       numflag     = 0;           /* Indicates initial offset (0 or 1) */
1490   PetscInt       ncon        = 1;           /* The number of weights per vertex */
1491   PetscInt       metis_ptype = pm->ptype;   /* kway or recursive bisection */
1492   real_t        *tpwgts;                    /* The fraction of vertex weights assigned to each partition */
1493   real_t        *ubvec;                     /* The balance intolerance for vertex weights */
1494   PetscInt       options[64];               /* Options */
1495   /* Outputs */
1496   PetscInt       v, i, *assignment, *points;
1497   PetscMPIInt    size, rank, p;
1498   PetscErrorCode ierr;
1499 
1500   PetscFunctionBegin;
1501   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1502   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1503   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1504   /* Calculate vertex distribution */
1505   ierr = PetscMalloc5(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
1506   vtxdist[0] = 0;
1507   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1508   for (p = 2; p <= size; ++p) {
1509     vtxdist[p] += vtxdist[p-1];
1510   }
1511   /* Calculate partition weights */
1512   for (p = 0; p < nparts; ++p) {
1513     tpwgts[p] = 1.0/nparts;
1514   }
1515   ubvec[0] = pm->imbalanceRatio;
1516   /* Weight cells by dofs on cell by default */
1517   ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
1518   if (section) {
1519     PetscInt cStart, cEnd, dof;
1520 
1521     ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
1522     for (v = cStart; v < cEnd; ++v) {
1523       ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1524       /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1525       /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1526       if (v-cStart < numVertices) vwgt[v-cStart] = PetscMax(dof, 1);
1527     }
1528   } else {
1529     for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1530   }
1531   wgtflag |= 2; /* have weights on graph vertices */
1532 
1533   if (nparts == 1) {
1534     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1535   } else {
1536     for (p = 0; !vtxdist[p+1] && p < size; ++p);
1537     if (vtxdist[p+1] == vtxdist[size]) {
1538       if (rank == p) {
1539         ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
1540         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
1541         if (metis_ptype == 1) {
1542           PetscStackPush("METIS_PartGraphRecursive");
1543           ierr = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1544           PetscStackPop;
1545           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
1546         } else {
1547           /*
1548            It would be nice to activate the two options below, but they would need some actual testing.
1549            - Turning on these options may exercise path of the METIS code that have bugs and may break production runs.
1550            - If CONTIG is set to 1, METIS will exit with error if the graph is disconnected, despite the manual saying the option is ignored in such case.
1551           */
1552           /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
1553           /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
1554           PetscStackPush("METIS_PartGraphKway");
1555           ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1556           PetscStackPop;
1557           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1558         }
1559       }
1560     } else {
1561       options[0] = 1;
1562       options[1] = pm->debugFlag;
1563       PetscStackPush("ParMETIS_V3_PartKway");
1564       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &comm);
1565       PetscStackPop;
1566       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
1567     }
1568   }
1569   /* Convert to PetscSection+IS */
1570   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1571   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1572   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1573   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1574   for (p = 0, i = 0; p < nparts; ++p) {
1575     for (v = 0; v < nvtxs; ++v) {
1576       if (assignment[v] == p) points[i++] = v;
1577     }
1578   }
1579   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1580   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1581   ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
1582   PetscFunctionReturn(0);
1583 #else
1584   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1585 #endif
1586 }
1587 
1588 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1589 {
1590   PetscFunctionBegin;
1591   part->ops->view           = PetscPartitionerView_ParMetis;
1592   part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
1593   part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
1594   part->ops->partition      = PetscPartitionerPartition_ParMetis;
1595   PetscFunctionReturn(0);
1596 }
1597 
1598 /*MC
1599   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1600 
1601   Level: intermediate
1602 
1603 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1604 M*/
1605 
1606 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1607 {
1608   PetscPartitioner_ParMetis *p;
1609   PetscErrorCode          ierr;
1610 
1611   PetscFunctionBegin;
1612   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1613   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1614   part->data = p;
1615 
1616   p->ptype          = 0;
1617   p->imbalanceRatio = 1.05;
1618   p->debugFlag      = 0;
1619 
1620   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1621   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1622   PetscFunctionReturn(0);
1623 }
1624 
1625 
1626 PetscBool PTScotchPartitionercite = PETSC_FALSE;
1627 const char PTScotchPartitionerCitation[] =
1628   "@article{PTSCOTCH,\n"
1629   "  author  = {C. Chevalier and F. Pellegrini},\n"
1630   "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
1631   "  journal = {Parallel Computing},\n"
1632   "  volume  = {34},\n"
1633   "  number  = {6},\n"
1634   "  pages   = {318--331},\n"
1635   "  year    = {2008},\n"
1636   "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
1637   "}\n";
1638 
/* Private data for the PT-Scotch partitioner */
typedef struct {
  PetscInt  strategy;  /* index into PTScotchStrategyList; mapped to a SCOTCH_STRAT* flag by PTScotch_Strategy() */
  PetscReal imbalance; /* load imbalance tolerance passed to the Scotch mapping strategy */
} PetscPartitioner_PTScotch;
1643 
/* Human-readable names for the supported strategies; order must match PTScotch_Strategy() */
static const char *const
PTScotchStrategyList[] = {
  "DEFAULT",
  "QUALITY",
  "SPEED",
  "BALANCE",
  "SAFETY",
  "SCALABILITY",
  "RECURSIVE",
  "REMAP"
};
1655 
1656 #if defined(PETSC_HAVE_PTSCOTCH)
1657 
1658 EXTERN_C_BEGIN
1659 #include <ptscotch.h>
1660 EXTERN_C_END
1661 
1662 #define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0)
1663 
1664 static int PTScotch_Strategy(PetscInt strategy)
1665 {
1666   switch (strategy) {
1667   case  0: return SCOTCH_STRATDEFAULT;
1668   case  1: return SCOTCH_STRATQUALITY;
1669   case  2: return SCOTCH_STRATSPEED;
1670   case  3: return SCOTCH_STRATBALANCE;
1671   case  4: return SCOTCH_STRATSAFETY;
1672   case  5: return SCOTCH_STRATSCALABILITY;
1673   case  6: return SCOTCH_STRATRECURSIVE;
1674   case  7: return SCOTCH_STRATREMAP;
1675   default: return SCOTCH_STRATDEFAULT;
1676   }
1677 }
1678 
1679 
1680 static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1681                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[])
1682 {
1683   SCOTCH_Graph   grafdat;
1684   SCOTCH_Strat   stradat;
1685   SCOTCH_Num     vertnbr = n;
1686   SCOTCH_Num     edgenbr = xadj[n];
1687   SCOTCH_Num*    velotab = vtxwgt;
1688   SCOTCH_Num*    edlotab = adjwgt;
1689   SCOTCH_Num     flagval = strategy;
1690   double         kbalval = imbalance;
1691   PetscErrorCode ierr;
1692 
1693   PetscFunctionBegin;
1694   {
1695     PetscBool flg = PETSC_TRUE;
1696     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
1697     if (!flg) velotab = NULL;
1698   }
1699   ierr = SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr);
1700   ierr = SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
1701   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1702   ierr = SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
1703 #if defined(PETSC_USE_DEBUG)
1704   ierr = SCOTCH_graphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1705 #endif
1706   ierr = SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
1707   SCOTCH_stratExit(&stradat);
1708   SCOTCH_graphExit(&grafdat);
1709   PetscFunctionReturn(0);
1710 }
1711 
1712 static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1713                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[], MPI_Comm comm)
1714 {
1715   PetscMPIInt     procglbnbr;
1716   PetscMPIInt     proclocnum;
1717   SCOTCH_Arch     archdat;
1718   SCOTCH_Dgraph   grafdat;
1719   SCOTCH_Dmapping mappdat;
1720   SCOTCH_Strat    stradat;
1721   SCOTCH_Num      vertlocnbr;
1722   SCOTCH_Num      edgelocnbr;
1723   SCOTCH_Num*     veloloctab = vtxwgt;
1724   SCOTCH_Num*     edloloctab = adjwgt;
1725   SCOTCH_Num      flagval = strategy;
1726   double          kbalval = imbalance;
1727   PetscErrorCode  ierr;
1728 
1729   PetscFunctionBegin;
1730   {
1731     PetscBool flg = PETSC_TRUE;
1732     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
1733     if (!flg) veloloctab = NULL;
1734   }
1735   ierr = MPI_Comm_size(comm, &procglbnbr);CHKERRQ(ierr);
1736   ierr = MPI_Comm_rank(comm, &proclocnum);CHKERRQ(ierr);
1737   vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
1738   edgelocnbr = xadj[vertlocnbr];
1739 
1740   ierr = SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr);
1741   ierr = SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr);
1742 #if defined(PETSC_USE_DEBUG)
1743   ierr = SCOTCH_dgraphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1744 #endif
1745   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1746   ierr = SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRQ(ierr);
1747   ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
1748   ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
1749   ierr = SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr);
1750 
1751   ierr = SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr);
1752   SCOTCH_dgraphMapExit(&grafdat, &mappdat);
1753   SCOTCH_archExit(&archdat);
1754   SCOTCH_stratExit(&stradat);
1755   SCOTCH_dgraphExit(&grafdat);
1756   PetscFunctionReturn(0);
1757 }
1758 
1759 #endif /* PETSC_HAVE_PTSCOTCH */
1760 
1761 static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
1762 {
1763   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1764   PetscErrorCode             ierr;
1765 
1766   PetscFunctionBegin;
1767   ierr = PetscFree(p);CHKERRQ(ierr);
1768   PetscFunctionReturn(0);
1769 }
1770 
1771 static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer)
1772 {
1773   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1774   PetscErrorCode            ierr;
1775 
1776   PetscFunctionBegin;
1777   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1778   ierr = PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);CHKERRQ(ierr);
1779   ierr = PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n",(double)p->imbalance);CHKERRQ(ierr);
1780   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1781   PetscFunctionReturn(0);
1782 }
1783 
1784 static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
1785 {
1786   PetscBool      iascii;
1787   PetscErrorCode ierr;
1788 
1789   PetscFunctionBegin;
1790   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1791   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1792   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1793   if (iascii) {ierr = PetscPartitionerView_PTScotch_Ascii(part, viewer);CHKERRQ(ierr);}
1794   PetscFunctionReturn(0);
1795 }
1796 
1797 static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1798 {
1799   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1800   const char *const         *slist = PTScotchStrategyList;
1801   PetscInt                  nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0]));
1802   PetscBool                 flag;
1803   PetscErrorCode            ierr;
1804 
1805   PetscFunctionBegin;
1806   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");CHKERRQ(ierr);
1807   ierr = PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);CHKERRQ(ierr);
1808   ierr = PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);CHKERRQ(ierr);
1809   ierr = PetscOptionsTail();CHKERRQ(ierr);
1810   PetscFunctionReturn(0);
1811 }
1812 
1813 static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1814 {
1815 #if defined(PETSC_HAVE_PTSCOTCH)
1816   MPI_Comm       comm       = PetscObjectComm((PetscObject)part);
1817   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1818   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1819   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1820   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1821   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1822   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1823   PetscInt       v, i, *assignment, *points;
1824   PetscMPIInt    size, rank, p;
1825   PetscErrorCode ierr;
1826 
1827   PetscFunctionBegin;
1828   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1829   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1830   ierr = PetscMalloc2(size+1,&vtxdist,PetscMax(nvtxs,1),&assignment);CHKERRQ(ierr);
1831 
1832   /* Calculate vertex distribution */
1833   vtxdist[0] = 0;
1834   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1835   for (p = 2; p <= size; ++p) {
1836     vtxdist[p] += vtxdist[p-1];
1837   }
1838 
1839   if (nparts == 1) {
1840     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1841   } else {
1842     PetscSection section;
1843     /* Weight cells by dofs on cell by default */
1844     ierr = PetscMalloc1(PetscMax(nvtxs,1),&vwgt);CHKERRQ(ierr);
1845     ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
1846     if (section) {
1847       PetscInt vStart, eEnd, dof;
1848       ierr = DMPlexGetHeightStratum(dm, 0, &vStart, &eEnd);CHKERRQ(ierr);
1849       for (v = vStart; v < eEnd; ++v) {
1850         ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1851         /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1852         /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1853         if (v-vStart < numVertices) vwgt[v-vStart] = PetscMax(dof, 1);
1854       }
1855     } else {
1856       for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1857     }
1858     {
1859       PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
1860       int                       strat = PTScotch_Strategy(pts->strategy);
1861       double                    imbal = (double)pts->imbalance;
1862 
1863       for (p = 0; !vtxdist[p+1] && p < size; ++p);
1864       if (vtxdist[p+1] == vtxdist[size]) {
1865         if (rank == p) {
1866           ierr = PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, assignment);CHKERRQ(ierr);
1867         }
1868       } else {
1869         ierr = PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, assignment, comm);CHKERRQ(ierr);
1870       }
1871     }
1872     ierr = PetscFree(vwgt);CHKERRQ(ierr);
1873   }
1874 
1875   /* Convert to PetscSection+IS */
1876   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1877   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1878   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1879   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1880   for (p = 0, i = 0; p < nparts; ++p) {
1881     for (v = 0; v < nvtxs; ++v) {
1882       if (assignment[v] == p) points[i++] = v;
1883     }
1884   }
1885   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1886   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1887 
1888   ierr = PetscFree2(vtxdist,assignment);CHKERRQ(ierr);
1889   PetscFunctionReturn(0);
1890 #else
1891   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
1892 #endif
1893 }
1894 
1895 static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
1896 {
1897   PetscFunctionBegin;
1898   part->ops->view           = PetscPartitionerView_PTScotch;
1899   part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
1900   part->ops->partition      = PetscPartitionerPartition_PTScotch;
1901   part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
1902   PetscFunctionReturn(0);
1903 }
1904 
1905 /*MC
1906   PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library
1907 
1908   Level: intermediate
1909 
1910 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1911 M*/
1912 
1913 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
1914 {
1915   PetscPartitioner_PTScotch *p;
1916   PetscErrorCode          ierr;
1917 
1918   PetscFunctionBegin;
1919   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1920   ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
1921   part->data = p;
1922 
1923   p->strategy  = 0;
1924   p->imbalance = 0.01;
1925 
1926   ierr = PetscPartitionerInitialize_PTScotch(part);CHKERRQ(ierr);
1927   ierr = PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);CHKERRQ(ierr);
1928   PetscFunctionReturn(0);
1929 }
1930 
1931 
1932 /*@
1933   DMPlexGetPartitioner - Get the mesh partitioner
1934 
1935   Not collective
1936 
1937   Input Parameter:
1938 . dm - The DM
1939 
1940   Output Parameter:
1941 . part - The PetscPartitioner
1942 
1943   Level: developer
1944 
1945   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1946 
1947 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1948 @*/
1949 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1950 {
1951   DM_Plex       *mesh = (DM_Plex *) dm->data;
1952 
1953   PetscFunctionBegin;
1954   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1955   PetscValidPointer(part, 2);
1956   *part = mesh->partitioner;
1957   PetscFunctionReturn(0);
1958 }
1959 
1960 /*@
1961   DMPlexSetPartitioner - Set the mesh partitioner
1962 
  Logically collective on dm and part
1964 
1965   Input Parameters:
1966 + dm - The DM
1967 - part - The partitioner
1968 
1969   Level: developer
1970 
1971   Note: Any existing PetscPartitioner will be destroyed.
1972 
1973 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1974 @*/
PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
{
  DM_Plex       *mesh = (DM_Plex *) dm->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
  /* Take the new reference BEFORE destroying the old partitioner, so that
     setting the currently-installed partitioner again is safe */
  ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
  ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
  mesh->partitioner = part;
  PetscFunctionReturn(0);
}
1988 
/* Add to the hash set "ht" the points related to "point" through the mesh tree
   structure: the transitive closure of the tree parent when "up" is set, and all
   tree children when "down" is set.  Recursion proceeds in one direction only
   (up for ancestors, down for descendants) so the whole chain is captured. */
static PetscErrorCode DMPlexAddClosure_Tree(DM dm, PetscHSetI ht, PetscInt point, PetscBool up, PetscBool down)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (up) {
    PetscInt parent;

    ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
    /* NOTE(review): assumes DMPlexGetTreeParent() returns the point itself when it has no parent -- confirm */
    if (parent != point) {
      PetscInt closureSize, *closure = NULL, i;

      ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
      for (i = 0; i < closureSize; i++) {
        PetscInt cpoint = closure[2*i]; /* closure stores (point, orientation) pairs; take the point */

        ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
        /* Continue climbing: ancestors of the parent's closure points */
        ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
      }
      ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
    }
  }
  if (down) {
    PetscInt numChildren;
    const PetscInt *children;

    ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
    if (numChildren) {
      PetscInt i;

      for (i = 0; i < numChildren; i++) {
        PetscInt cpoint = children[i];

        ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
        /* Continue descending: children of children */
        ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}
2029 
2030 PETSC_INTERN PetscErrorCode DMPlexPartitionLabelClosure_Private(DM,DMLabel,PetscInt,PetscInt,const PetscInt[],IS*);
2031 
/* Compute the union of the transitive closures of "points" (also pulling in tree
   ancestors/children when the mesh has a tree structure) and return it, sorted,
   as the IS *closureIS.
   NOTE(review): the "label" and "rank" parameters are unused in this body. */
PetscErrorCode DMPlexPartitionLabelClosure_Private(DM dm, DMLabel label, PetscInt rank, PetscInt numPoints, const PetscInt points[], IS *closureIS)
{
  DM_Plex        *mesh = (DM_Plex *)dm->data;
  PetscBool      hasTree = (mesh->parentSection || mesh->childSection) ? PETSC_TRUE : PETSC_FALSE;
  PetscInt       *closure = NULL, closureSize, nelems, *elems, off = 0, p, c;
  PetscHSetI     ht;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
  /* Heuristic preallocation: roughly 16 closure points per input point */
  ierr = PetscHSetIResize(ht, numPoints*16);CHKERRQ(ierr);
  for (p = 0; p < numPoints; ++p) {
    ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
    /* closure holds (point, orientation) pairs; keep only the points */
    for (c = 0; c < closureSize*2; c += 2) {
      ierr = PetscHSetIAdd(ht, closure[c]);CHKERRQ(ierr);
      if (hasTree) {ierr = DMPlexAddClosure_Tree(dm, ht, closure[c], PETSC_TRUE, PETSC_TRUE);CHKERRQ(ierr);}
    }
  }
  /* Return the (reused) closure work array if any point was processed */
  if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure);CHKERRQ(ierr);}
  ierr = PetscHSetIGetSize(ht, &nelems);CHKERRQ(ierr);
  ierr = PetscMalloc1(nelems, &elems);CHKERRQ(ierr);
  ierr = PetscHSetIGetElems(ht, &off, elems);CHKERRQ(ierr);
  ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
  ierr = PetscSortInt(nelems, elems);CHKERRQ(ierr);
  /* The IS takes ownership of elems */
  ierr = ISCreateGeneral(PETSC_COMM_SELF, nelems, elems, PETSC_OWN_POINTER, closureIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2059 
2060 /*@
2061   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
2062 
2063   Input Parameters:
2064 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2066 
2067   Level: developer
2068 
2069 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2070 @*/
2071 PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
2072 {
2073   IS              rankIS,   pointIS, closureIS;
2074   const PetscInt *ranks,   *points;
2075   PetscInt        numRanks, numPoints, r;
2076   PetscErrorCode  ierr;
2077 
2078   PetscFunctionBegin;
2079   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2080   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2081   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2082   for (r = 0; r < numRanks; ++r) {
2083     const PetscInt rank = ranks[r];
2084     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2085     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2086     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2087     ierr = DMPlexPartitionLabelClosure_Private(dm, label, rank, numPoints, points, &closureIS);CHKERRQ(ierr);
2088     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2089     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2090     ierr = DMLabelSetStratumIS(label, rank, closureIS);CHKERRQ(ierr);
2091     ierr = ISDestroy(&closureIS);CHKERRQ(ierr);
2092   }
2093   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2094   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2095   PetscFunctionReturn(0);
2096 }
2097 
2098 /*@
2099   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
2100 
2101   Input Parameters:
2102 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2104 
2105   Level: developer
2106 
2107 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2108 @*/
2109 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
2110 {
2111   IS              rankIS,   pointIS;
2112   const PetscInt *ranks,   *points;
2113   PetscInt        numRanks, numPoints, r, p, a, adjSize;
2114   PetscInt       *adj = NULL;
2115   PetscErrorCode  ierr;
2116 
2117   PetscFunctionBegin;
2118   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2119   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2120   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2121   for (r = 0; r < numRanks; ++r) {
2122     const PetscInt rank = ranks[r];
2123 
2124     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2125     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2126     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2127     for (p = 0; p < numPoints; ++p) {
2128       adjSize = PETSC_DETERMINE;
2129       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
2130       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
2131     }
2132     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2133     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2134   }
2135   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2136   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2137   ierr = PetscFree(adj);CHKERRQ(ierr);
2138   PetscFunctionReturn(0);
2139 }
2140 
2141 /*@
2142   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
2143 
2144   Input Parameters:
2145 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2147 
2148   Level: developer
2149 
2150   Note: This is required when generating multi-level overlaps to capture
2151   overlap points from non-neighbouring partitions.
2152 
2153 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2154 @*/
2155 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
2156 {
2157   MPI_Comm        comm;
2158   PetscMPIInt     rank;
2159   PetscSF         sfPoint;
2160   DMLabel         lblRoots, lblLeaves;
2161   IS              rankIS, pointIS;
2162   const PetscInt *ranks;
2163   PetscInt        numRanks, r;
2164   PetscErrorCode  ierr;
2165 
2166   PetscFunctionBegin;
2167   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2168   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2169   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
2170   /* Pull point contributions from remote leaves into local roots */
2171   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
2172   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
2173   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2174   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2175   for (r = 0; r < numRanks; ++r) {
2176     const PetscInt remoteRank = ranks[r];
2177     if (remoteRank == rank) continue;
2178     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
2179     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2180     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2181   }
2182   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2183   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2184   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
2185   /* Push point contributions from roots into remote leaves */
2186   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
2187   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
2188   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2189   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2190   for (r = 0; r < numRanks; ++r) {
2191     const PetscInt remoteRank = ranks[r];
2192     if (remoteRank == rank) continue;
2193     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
2194     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2195     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2196   }
2197   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2198   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2199   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
2200   PetscFunctionReturn(0);
2201 }
2202 
2203 /*@
2204   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
2205 
  Input Parameters:
+ dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots
2213 
2214   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
2215   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
2216 
2217   Level: developer
2218 
2219 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2220 @*/
2221 PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
2222 {
2223   MPI_Comm           comm;
2224   PetscMPIInt        rank, size, r;
2225   PetscInt           p, n, numNeighbors, numPoints, dof, off, rootSize, l, nleaves, leafSize;
2226   PetscSF            sfPoint;
2227   PetscSection       rootSection;
2228   PetscSFNode       *rootPoints, *leafPoints;
2229   const PetscSFNode *remote;
2230   const PetscInt    *local, *neighbors;
2231   IS                 valueIS;
2232   PetscBool          mpiOverflow = PETSC_FALSE;
2233   PetscErrorCode     ierr;
2234 
2235   PetscFunctionBegin;
2236   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2237   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2238   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2239   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
2240 
2241   /* Convert to (point, rank) and use actual owners */
2242   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
2243   ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
2244   ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
2245   ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
2246   ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
2247   for (n = 0; n < numNeighbors; ++n) {
2248     ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
2249     ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
2250   }
2251   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
2252   ierr = PetscSectionGetStorageSize(rootSection, &rootSize);CHKERRQ(ierr);
2253   ierr = PetscMalloc1(rootSize, &rootPoints);CHKERRQ(ierr);
2254   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
2255   for (n = 0; n < numNeighbors; ++n) {
2256     IS              pointIS;
2257     const PetscInt *points;
2258 
2259     ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
2260     ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
2261     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2262     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2263     for (p = 0; p < numPoints; ++p) {
2264       if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
2265       else       {l = -1;}
2266       if (l >= 0) {rootPoints[off+p] = remote[l];}
2267       else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
2268     }
2269     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2270     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2271   }
2272 
2273   /* Try to communicate overlap using All-to-All */
2274   if (!processSF) {
2275     PetscInt64  counter = 0;
2276     PetscBool   locOverflow = PETSC_FALSE;
2277     PetscMPIInt *scounts, *sdispls, *rcounts, *rdispls;
2278 
2279     ierr = PetscCalloc4(size, &scounts, size, &sdispls, size, &rcounts, size, &rdispls);CHKERRQ(ierr);
2280     for (n = 0; n < numNeighbors; ++n) {
2281       ierr = PetscSectionGetDof(rootSection, neighbors[n], &dof);CHKERRQ(ierr);
2282       ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
2283 #if defined(PETSC_USE_64BIT_INDICES)
2284       if (dof > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
2285       if (off > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
2286 #endif
2287       scounts[neighbors[n]] = (PetscMPIInt) dof;
2288       sdispls[neighbors[n]] = (PetscMPIInt) off;
2289     }
2290     ierr = MPI_Alltoall(scounts, 1, MPI_INT, rcounts, 1, MPI_INT, comm);CHKERRQ(ierr);
2291     for (r = 0; r < size; ++r) { rdispls[r] = (int)counter; counter += rcounts[r]; }
2292     if (counter > PETSC_MPI_INT_MAX) locOverflow = PETSC_TRUE;
2293     ierr = MPI_Allreduce(&locOverflow, &mpiOverflow, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
2294     if (!mpiOverflow) {
2295       leafSize = (PetscInt) counter;
2296       ierr = PetscMalloc1(leafSize, &leafPoints);CHKERRQ(ierr);
2297       ierr = MPI_Alltoallv(rootPoints, scounts, sdispls, MPIU_2INT, leafPoints, rcounts, rdispls, MPIU_2INT, comm);CHKERRQ(ierr);
2298     }
2299     ierr = PetscFree4(scounts, sdispls, rcounts, rdispls);CHKERRQ(ierr);
2300   }
2301 
2302   /* Communicate overlap using process star forest */
2303   if (processSF || mpiOverflow) {
2304     PetscSF      procSF;
2305     PetscSFNode  *remote;
2306     PetscSection leafSection;
2307 
2308     if (processSF) {
2309       ierr = PetscObjectReference((PetscObject)processSF);CHKERRQ(ierr);
2310       procSF = processSF;
2311     } else {
2312       ierr = PetscMalloc1(size, &remote);CHKERRQ(ierr);
2313       for (r = 0; r < size; ++r) { remote[r].rank  = r; remote[r].index = rank; }
2314       ierr = PetscSFCreate(comm, &procSF);CHKERRQ(ierr);
2315       ierr = PetscSFSetGraph(procSF, size, size, NULL, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
2316     }
2317 
2318     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dm), &leafSection);CHKERRQ(ierr);
2319     ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
2320     ierr = PetscSectionGetStorageSize(leafSection, &leafSize);CHKERRQ(ierr);
2321     ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
2322     ierr = PetscSFDestroy(&procSF);CHKERRQ(ierr);
2323   }
2324 
2325   for (p = 0; p < leafSize; p++) {
2326     ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
2327   }
2328 
2329   ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
2330   ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
2331   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
2332   ierr = PetscFree(rootPoints);CHKERRQ(ierr);
2333   ierr = PetscFree(leafPoints);CHKERRQ(ierr);
2334   PetscFunctionReturn(0);
2335 }
2336 
2337 /*@
2338   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
2339 
  Input Parameters:
+ dm    - The DM
- label - DMLabel assigning ranks to remote roots

  Output Parameter:
. sf    - The star forest communication context encapsulating the defined mapping
2346 
2347   Note: The incoming label is a receiver mapping of remote points to their parent rank.
2348 
2349   Level: developer
2350 
2351 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
2352 @*/
2353 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
2354 {
2355   PetscMPIInt     rank, size;
2356   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
2357   PetscSFNode    *remotePoints;
2358   IS              remoteRootIS;
2359   const PetscInt *remoteRoots;
2360   PetscErrorCode ierr;
2361 
2362   PetscFunctionBegin;
2363   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
2364   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
2365 
2366   for (numRemote = 0, n = 0; n < size; ++n) {
2367     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
2368     numRemote += numPoints;
2369   }
2370   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
2371   /* Put owned points first */
2372   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
2373   if (numPoints > 0) {
2374     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
2375     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2376     for (p = 0; p < numPoints; p++) {
2377       remotePoints[idx].index = remoteRoots[p];
2378       remotePoints[idx].rank = rank;
2379       idx++;
2380     }
2381     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2382     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
2383   }
2384   /* Now add remote points */
2385   for (n = 0; n < size; ++n) {
2386     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
2387     if (numPoints <= 0 || n == rank) continue;
2388     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
2389     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2390     for (p = 0; p < numPoints; p++) {
2391       remotePoints[idx].index = remoteRoots[p];
2392       remotePoints[idx].rank = n;
2393       idx++;
2394     }
2395     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2396     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
2397   }
2398   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
2399   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
2400   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
2401   PetscFunctionReturn(0);
2402 }
2403 
2404 /* The two functions below are used by DMPlexRebalanceSharedPoints which errors
2405  * when PETSc is built without ParMETIS. To avoid -Wunused-function, we take
2406  * them out in that case. */
2407 #if defined(PETSC_HAVE_PARMETIS)
2408 /*@C
2409 
2410   DMPlexRewriteSF - Rewrites the ownership of the SF of a DM (in place).
2411 
  Input parameters:
+ dm              - The DMPlex object.
. n               - The number of points.
. pointsToRewrite - The points in the SF whose ownership will change.
. targetOwners    - New owner for each element in pointsToRewrite.
- degrees         - Degrees of the points in the SF as obtained by PetscSFComputeDegreeBegin/PetscSFComputeDegreeEnd.
2418 
2419   Level: developer
2420 
2421 @*/
/* Rewrites leaf ownership of the DM's point SF in place (see manual page above). */
static PetscErrorCode DMPlexRewriteSF(DM dm, PetscInt n, PetscInt *pointsToRewrite, PetscInt *targetOwners, const PetscInt *degrees)
{
  /* NOTE(review): ierr is declared PetscInt rather than PetscErrorCode --
     it compiles and CHKERRQ works, but PetscErrorCode would be conventional. */
  PetscInt      ierr, pStart, pEnd, i, j, counter, leafCounter, sumDegrees, nroots, nleafs;
  PetscInt     *cumSumDegrees, *newOwners, *newNumbers, *rankOnLeafs, *locationsOfLeafs, *remoteLocalPointOfLeafs, *points, *leafsNew;
  PetscSFNode  *leafLocationsNew;
  const         PetscSFNode *iremote;
  const         PetscInt *ilocal;
  PetscBool    *isLeaf;
  PetscSF       sf;
  MPI_Comm      comm;
  PetscMPIInt   rank, size;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);

  ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sf, &nroots, &nleafs, &ilocal, &iremote); CHKERRQ(ierr);
  /* Mark which chart points are SF leaves (i.e. owned by another rank).
     NOTE(review): assumes ilocal != NULL; an SF with contiguous leaves
     (ilocal == NULL) would crash here -- confirm against callers. */
  ierr = PetscMalloc1(pEnd-pStart, &isLeaf);CHKERRQ(ierr);
  for (i=0; i<pEnd-pStart; i++) {
    isLeaf[i] = PETSC_FALSE;
  }
  for (i=0; i<nleafs; i++) {
    isLeaf[ilocal[i]-pStart] = PETSC_TRUE;
  }

  /* Prefix sum of root degrees: cumSumDegrees[p] is the offset of point p's
     gathered leaf records in the arrays filled by PetscSFGather below. */
  ierr = PetscMalloc1(pEnd-pStart+1, &cumSumDegrees);CHKERRQ(ierr);
  cumSumDegrees[0] = 0;
  for (i=1; i<=pEnd-pStart; i++) {
    cumSumDegrees[i] = cumSumDegrees[i-1] + degrees[i-1];
  }
  sumDegrees = cumSumDegrees[pEnd-pStart];
  /* get the location of my leafs (we have sumDegrees many leafs pointing at our roots) */

  ierr = PetscMalloc1(sumDegrees, &locationsOfLeafs);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &rankOnLeafs);CHKERRQ(ierr);
  for (i=0; i<pEnd-pStart; i++) {
    rankOnLeafs[i] = rank;
  }
  /* Gather, for every root, the rank of each leaf referencing it */
  ierr = PetscSFGatherBegin(sf, MPIU_INT, rankOnLeafs, locationsOfLeafs);CHKERRQ(ierr);
  ierr = PetscSFGatherEnd(sf, MPIU_INT, rankOnLeafs, locationsOfLeafs);CHKERRQ(ierr);
  ierr = PetscFree(rankOnLeafs);CHKERRQ(ierr);

  /* get the remote local points of my leaves */
  ierr = PetscMalloc1(sumDegrees, &remoteLocalPointOfLeafs);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &points);CHKERRQ(ierr);
  for (i=0; i<pEnd-pStart; i++) {
    points[i] = pStart+i;
  }
  /* Gather, for every root, the local point number each leaf uses for it */
  ierr = PetscSFGatherBegin(sf, MPIU_INT, points, remoteLocalPointOfLeafs);CHKERRQ(ierr);
  ierr = PetscSFGatherEnd(sf, MPIU_INT, points, remoteLocalPointOfLeafs);CHKERRQ(ierr);
  ierr = PetscFree(points);CHKERRQ(ierr);
  /* Figure out the new owners of the vertices that are up for grabs and their numbers on the new owners */
  ierr = PetscMalloc1(pEnd-pStart, &newOwners);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &newNumbers);CHKERRQ(ierr);
  for (i=0; i<pEnd-pStart; i++) {
    /* -1 marks "ownership unchanged" for this point */
    newOwners[i] = -1;
    newNumbers[i] = -1;
  }
  {
    PetscInt oldNumber, newNumber, oldOwner, newOwner;
    for (i=0; i<n; i++) {
      oldNumber = pointsToRewrite[i];
      newNumber = -1;
      oldOwner = rank;
      newOwner = targetOwners[i];
      if (oldOwner == newOwner) {
        newNumber = oldNumber;
      } else {
        /* Search this root's gathered leaf records for the new owner's
           local numbering of the point */
        for (j=0; j<degrees[oldNumber]; j++) {
          if (locationsOfLeafs[cumSumDegrees[oldNumber]+j] == newOwner) {
            newNumber = remoteLocalPointOfLeafs[cumSumDegrees[oldNumber]+j];
            break;
          }
        }
      }
      if (newNumber == -1) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Couldn't find the new owner of vertex.");

      newOwners[oldNumber] = newOwner;
      newNumbers[oldNumber] = newNumber;
    }
  }
  ierr = PetscFree(cumSumDegrees);CHKERRQ(ierr);
  ierr = PetscFree(locationsOfLeafs);CHKERRQ(ierr);
  ierr = PetscFree(remoteLocalPointOfLeafs);CHKERRQ(ierr);

  /* Broadcast the ownership decisions from roots onto leaves, in place
     (root buffer and leaf buffer are intentionally the same array) */
  ierr = PetscSFBcastBegin(sf, MPIU_INT, newOwners, newOwners);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf, MPIU_INT, newOwners, newOwners);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf, MPIU_INT, newNumbers, newNumbers);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf, MPIU_INT, newNumbers, newNumbers);CHKERRQ(ierr);

  /* Now count how many leafs we have on each processor. */
  leafCounter=0;
  for (i=0; i<pEnd-pStart; i++) {
    if (newOwners[i] >= 0) {
      if (newOwners[i] != rank) {
        leafCounter++;
      }
    } else {
      if (isLeaf[i]) {
        leafCounter++;
      }
    }
  }

  /* Now set up the new sf by creating the leaf arrays */
  ierr = PetscMalloc1(leafCounter, &leafsNew);CHKERRQ(ierr);
  ierr = PetscMalloc1(leafCounter, &leafLocationsNew);CHKERRQ(ierr);

  leafCounter = 0;
  /* counter walks the old iremote array in step with the old leaves */
  counter = 0;
  for (i=0; i<pEnd-pStart; i++) {
    if (newOwners[i] >= 0) {
      if (newOwners[i] != rank) {
        /* rewritten point now owned elsewhere: becomes a leaf of the new owner */
        leafsNew[leafCounter] = i;
        leafLocationsNew[leafCounter].rank = newOwners[i];
        leafLocationsNew[leafCounter].index = newNumbers[i];
        leafCounter++;
      }
    } else {
      if (isLeaf[i]) {
        /* untouched leaf: keep its old remote location */
        leafsNew[leafCounter] = i;
        leafLocationsNew[leafCounter].rank = iremote[counter].rank;
        leafLocationsNew[leafCounter].index = iremote[counter].index;
        leafCounter++;
      }
    }
    if (isLeaf[i]) {
      counter++;
    }
  }

  /* Replace the SF graph in place; leafsNew/leafLocationsNew ownership transfers to the SF */
  ierr = PetscSFSetGraph(sf, nroots, leafCounter, leafsNew, PETSC_OWN_POINTER, leafLocationsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscFree(newOwners);CHKERRQ(ierr);
  ierr = PetscFree(newNumbers);CHKERRQ(ierr);
  ierr = PetscFree(isLeaf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2562 
2563 static PetscErrorCode DMPlexViewDistribution(MPI_Comm comm, PetscInt n, PetscInt skip, PetscInt *vtxwgt, PetscInt *part, PetscViewer viewer)
2564 {
2565   PetscInt *distribution, min, max, sum, i, ierr;
2566   PetscMPIInt rank, size;
2567   PetscFunctionBegin;
2568   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2569   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2570   ierr = PetscCalloc1(size, &distribution);CHKERRQ(ierr);
2571   for (i=0; i<n; i++) {
2572     if (part) distribution[part[i]] += vtxwgt[skip*i];
2573     else distribution[rank] += vtxwgt[skip*i];
2574   }
2575   ierr = MPI_Allreduce(MPI_IN_PLACE, distribution, size, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
2576   min = distribution[0];
2577   max = distribution[0];
2578   sum = distribution[0];
2579   for (i=1; i<size; i++) {
2580     if (distribution[i]<min) min=distribution[i];
2581     if (distribution[i]>max) max=distribution[i];
2582     sum += distribution[i];
2583   }
2584   ierr = PetscViewerASCIIPrintf(viewer, "Min: %D, Avg: %D, Max: %D, Balance: %f\n", min, sum/size, max, (max*1.*size)/sum);CHKERRQ(ierr);
2585   ierr = PetscFree(distribution);CHKERRQ(ierr);
2586   PetscFunctionReturn(0);
2587 }
2588 
2589 #endif
2590 
2591 /*@
2592   DMPlexRebalanceSharedPoints - Redistribute points in the plex that are shared in order to achieve better balancing. This routine updates the PointSF of the DM inplace.
2593 
  Input parameters:
+ dm              - The DMPlex object.
. entityDepth     - depth of the entity to balance (0 -> balance vertices).
. useInitialGuess - whether to use the current distribution as initial guess (only used by ParMETIS).
- parallel        - whether to use ParMETIS and do the partition in parallel or whether to gather the graph onto a single process and use METIS.
2599 
2600   Level: user
2601 
2602 @*/
2603 
2604 PetscErrorCode DMPlexRebalanceSharedPoints(DM dm, PetscInt entityDepth, PetscBool useInitialGuess, PetscBool parallel)
2605 {
2606 #if defined(PETSC_HAVE_PARMETIS)
2607   PetscSF     sf;
2608   PetscInt    ierr, i, j, idx, jdx;
2609   PetscInt    eBegin, eEnd, nroots, nleafs, pStart, pEnd;
2610   const       PetscInt *degrees, *ilocal;
2611   const       PetscSFNode *iremote;
2612   PetscBool   *toBalance, *isLeaf, *isExclusivelyOwned, *isNonExclusivelyOwned;
2613   PetscInt    numExclusivelyOwned, numNonExclusivelyOwned;
2614   PetscMPIInt rank, size;
2615   PetscInt    *globalNumbersOfLocalOwnedVertices, *leafGlobalNumbers;
2616   const       PetscInt *cumSumVertices;
2617   PetscInt    offset, counter;
2618   PetscInt    lenadjncy;
2619   PetscInt    *xadj, *adjncy, *vtxwgt;
2620   PetscInt    lenxadj;
2621   PetscInt    *adjwgt = NULL;
2622   PetscInt    *part, *options;
2623   PetscInt    nparts, wgtflag, numflag, ncon, edgecut;
2624   real_t      *ubvec;
2625   PetscInt    *firstVertices, *renumbering;
2626   PetscInt    failed, failedGlobal;
2627   MPI_Comm    comm;
2628   Mat         A, Apre;
2629   const char *prefix = NULL;
2630   PetscViewer       viewer;
2631   PetscViewerFormat format;
2632   PetscLayout layout;
2633 
2634   PetscFunctionBegin;
2635   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2636   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2637   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2638   if (size==1) PetscFunctionReturn(0);
2639 
2640   ierr = PetscLogEventBegin(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
2641 
2642   ierr = PetscOptionsGetViewer(comm,((PetscObject)dm)->options, prefix,"-dm_rebalance_partition_view",&viewer,&format,NULL);CHKERRQ(ierr);
2643   if (viewer) {
2644     ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr);
2645   }
2646 
2647   /* Figure out all points in the plex that we are interested in balancing. */
2648   ierr = DMPlexGetDepthStratum(dm, entityDepth, &eBegin, &eEnd);CHKERRQ(ierr);
2649   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
2650   ierr = PetscMalloc1(pEnd-pStart, &toBalance);CHKERRQ(ierr);
2651 
2652   for (i=0; i<pEnd-pStart; i++) {
2653     toBalance[i] = (PetscBool)(i-pStart>=eBegin && i-pStart<eEnd);
2654   }
2655 
2656   /* There are three types of points:
2657    * exclusivelyOwned: points that are owned by this process and only seen by this process
2658    * nonExclusivelyOwned: points that are owned by this process but seen by at least another process
2659    * leaf: a point that is seen by this process but owned by a different process
2660    */
2661   ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
2662   ierr = PetscSFGetGraph(sf, &nroots, &nleafs, &ilocal, &iremote); CHKERRQ(ierr);
2663   ierr = PetscMalloc1(pEnd-pStart, &isLeaf);CHKERRQ(ierr);
2664   ierr = PetscMalloc1(pEnd-pStart, &isNonExclusivelyOwned);CHKERRQ(ierr);
2665   ierr = PetscMalloc1(pEnd-pStart, &isExclusivelyOwned);CHKERRQ(ierr);
2666   for (i=0; i<pEnd-pStart; i++) {
2667     isNonExclusivelyOwned[i] = PETSC_FALSE;
2668     isExclusivelyOwned[i] = PETSC_FALSE;
2669     isLeaf[i] = PETSC_FALSE;
2670   }
2671 
2672   /* start by marking all the leafs */
2673   for (i=0; i<nleafs; i++) {
2674     isLeaf[ilocal[i]-pStart] = PETSC_TRUE;
2675   }
2676 
2677   /* for an owned point, we can figure out whether another processor sees it or
2678    * not by calculating its degree */
2679   ierr = PetscSFComputeDegreeBegin(sf, &degrees);CHKERRQ(ierr);
2680   ierr = PetscSFComputeDegreeEnd(sf, &degrees);CHKERRQ(ierr);
2681 
2682   numExclusivelyOwned = 0;
2683   numNonExclusivelyOwned = 0;
2684   for (i=0; i<pEnd-pStart; i++) {
2685     if (toBalance[i]) {
2686       if (degrees[i] > 0) {
2687         isNonExclusivelyOwned[i] = PETSC_TRUE;
2688         numNonExclusivelyOwned += 1;
2689       } else {
2690         if (!isLeaf[i]) {
2691           isExclusivelyOwned[i] = PETSC_TRUE;
2692           numExclusivelyOwned += 1;
2693         }
2694       }
2695     }
2696   }
2697 
2698   /* We are going to build a graph with one vertex per core representing the
2699    * exclusively owned points and then one vertex per nonExclusively owned
2700    * point. */
2701 
2702   ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
2703   ierr = PetscLayoutSetLocalSize(layout, 1 + numNonExclusivelyOwned);CHKERRQ(ierr);
2704   ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
2705   ierr = PetscLayoutGetRanges(layout, &cumSumVertices);CHKERRQ(ierr);
2706 
2707   ierr = PetscMalloc1(pEnd-pStart, &globalNumbersOfLocalOwnedVertices);CHKERRQ(ierr);
2708   offset = cumSumVertices[rank];
2709   counter = 0;
2710   for (i=0; i<pEnd-pStart; i++) {
2711     if (toBalance[i]) {
2712       if (degrees[i] > 0) {
2713         globalNumbersOfLocalOwnedVertices[i] = counter + 1 + offset;
2714         counter++;
2715       }
2716     }
2717   }
2718 
2719   /* send the global numbers of vertices I own to the leafs so that they know to connect to it */
2720   ierr = PetscMalloc1(pEnd-pStart, &leafGlobalNumbers);CHKERRQ(ierr);
2721   ierr = PetscSFBcastBegin(sf, MPIU_INT, globalNumbersOfLocalOwnedVertices, leafGlobalNumbers);CHKERRQ(ierr);
2722   ierr = PetscSFBcastEnd(sf, MPIU_INT, globalNumbersOfLocalOwnedVertices, leafGlobalNumbers);CHKERRQ(ierr);
2723 
2724   /* Now start building the data structure for ParMETIS */
2725 
2726   ierr = MatCreate(comm, &Apre);CHKERRQ(ierr);
2727   ierr = MatSetType(Apre, MATPREALLOCATOR);CHKERRQ(ierr);
2728   ierr = MatSetSizes(Apre, 1+numNonExclusivelyOwned, 1+numNonExclusivelyOwned, cumSumVertices[size], cumSumVertices[size]);CHKERRQ(ierr);
2729   ierr = MatSetUp(Apre);CHKERRQ(ierr);
2730 
2731   for (i=0; i<pEnd-pStart; i++) {
2732     if (toBalance[i]) {
2733       idx = cumSumVertices[rank];
2734       if (isNonExclusivelyOwned[i]) jdx = globalNumbersOfLocalOwnedVertices[i];
2735       else if (isLeaf[i]) jdx = leafGlobalNumbers[i];
2736       else continue;
2737       ierr = MatSetValue(Apre, idx, jdx, 1, INSERT_VALUES);CHKERRQ(ierr);
2738       ierr = MatSetValue(Apre, jdx, idx, 1, INSERT_VALUES);CHKERRQ(ierr);
2739     }
2740   }
2741 
2742   ierr = MatAssemblyBegin(Apre, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2743   ierr = MatAssemblyEnd(Apre, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2744 
2745   ierr = MatCreate(comm, &A);CHKERRQ(ierr);
2746   ierr = MatSetType(A, MATMPIAIJ);CHKERRQ(ierr);
2747   ierr = MatSetSizes(A, 1+numNonExclusivelyOwned, 1+numNonExclusivelyOwned, cumSumVertices[size], cumSumVertices[size]);CHKERRQ(ierr);
2748   ierr = MatPreallocatorPreallocate(Apre, PETSC_FALSE, A);CHKERRQ(ierr);
2749   ierr = MatDestroy(&Apre);CHKERRQ(ierr);
2750 
2751   for (i=0; i<pEnd-pStart; i++) {
2752     if (toBalance[i]) {
2753       idx = cumSumVertices[rank];
2754       if (isNonExclusivelyOwned[i]) jdx = globalNumbersOfLocalOwnedVertices[i];
2755       else if (isLeaf[i]) jdx = leafGlobalNumbers[i];
2756       else continue;
2757       ierr = MatSetValue(A, idx, jdx, 1, INSERT_VALUES);CHKERRQ(ierr);
2758       ierr = MatSetValue(A, jdx, idx, 1, INSERT_VALUES);CHKERRQ(ierr);
2759     }
2760   }
2761 
2762   ierr = MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2763   ierr = MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2764   ierr = PetscFree(leafGlobalNumbers);CHKERRQ(ierr);
2765   ierr = PetscFree(globalNumbersOfLocalOwnedVertices);CHKERRQ(ierr);
2766 
2767   nparts = size;
2768   wgtflag = 2;
2769   numflag = 0;
2770   ncon = 2;
2771   real_t *tpwgts;
2772   ierr = PetscMalloc1(ncon * nparts, &tpwgts);CHKERRQ(ierr);
2773   for (i=0; i<ncon*nparts; i++) {
2774     tpwgts[i] = 1./(nparts);
2775   }
2776 
2777   ierr = PetscMalloc1(ncon, &ubvec);CHKERRQ(ierr);
2778   ubvec[0] = 1.01;
2779   ubvec[1] = 1.01;
2780   lenadjncy = 0;
2781   for (i=0; i<1+numNonExclusivelyOwned; i++) {
2782     PetscInt temp=0;
2783     ierr = MatGetRow(A, cumSumVertices[rank] + i, &temp, NULL, NULL);CHKERRQ(ierr);
2784     lenadjncy += temp;
2785     ierr = MatRestoreRow(A, cumSumVertices[rank] + i, &temp, NULL, NULL);CHKERRQ(ierr);
2786   }
2787   ierr = PetscMalloc1(lenadjncy, &adjncy);CHKERRQ(ierr);
2788   lenxadj = 2 + numNonExclusivelyOwned;
2789   ierr = PetscMalloc1(lenxadj, &xadj);CHKERRQ(ierr);
2790   xadj[0] = 0;
2791   counter = 0;
2792   for (i=0; i<1+numNonExclusivelyOwned; i++) {
2793     PetscInt        temp=0;
2794     const PetscInt *cols;
2795     ierr = MatGetRow(A, cumSumVertices[rank] + i, &temp, &cols, NULL);CHKERRQ(ierr);
2796     ierr = PetscMemcpy(&adjncy[counter], cols, temp*sizeof(PetscInt));CHKERRQ(ierr);
2797     counter += temp;
2798     xadj[i+1] = counter;
2799     ierr = MatRestoreRow(A, cumSumVertices[rank] + i, &temp, &cols, NULL);CHKERRQ(ierr);
2800   }
2801 
2802   ierr = PetscMalloc1(cumSumVertices[rank+1]-cumSumVertices[rank], &part);CHKERRQ(ierr);
2803   ierr = PetscMalloc1(ncon*(1 + numNonExclusivelyOwned), &vtxwgt);CHKERRQ(ierr);
2804   vtxwgt[0] = numExclusivelyOwned;
2805   if (ncon>1) vtxwgt[1] = 1;
2806   for (i=0; i<numNonExclusivelyOwned; i++) {
2807     vtxwgt[ncon*(i+1)] = 1;
2808     if (ncon>1) vtxwgt[ncon*(i+1)+1] = 0;
2809   }
2810 
2811   if (viewer) {
2812     ierr = PetscViewerASCIIPrintf(viewer, "Attempt rebalancing of shared points of depth %D on interface of mesh distribution.\n", entityDepth);CHKERRQ(ierr);
2813     ierr = PetscViewerASCIIPrintf(viewer, "Size of generated auxiliary graph: %D\n", cumSumVertices[size]);CHKERRQ(ierr);
2814   }
2815   if (parallel) {
2816     ierr = PetscMalloc1(4, &options);CHKERRQ(ierr);
2817     options[0] = 1;
2818     options[1] = 0; /* Verbosity */
2819     options[2] = 0; /* Seed */
2820     options[3] = PARMETIS_PSR_COUPLED; /* Seed */
2821     if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using ParMETIS to partition graph.\n");CHKERRQ(ierr); }
2822     if (useInitialGuess) {
2823       if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using current distribution of points as initial guess.\n");CHKERRQ(ierr); }
2824       PetscStackPush("ParMETIS_V3_RefineKway");
2825       ierr = ParMETIS_V3_RefineKway((PetscInt*)cumSumVertices, xadj, adjncy, vtxwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgecut, part, &comm);
2826       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_RefineKway()");
2827       PetscStackPop;
2828     } else {
2829       PetscStackPush("ParMETIS_V3_PartKway");
2830       ierr = ParMETIS_V3_PartKway((PetscInt*)cumSumVertices, xadj, adjncy, vtxwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgecut, part, &comm);
2831       PetscStackPop;
2832       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
2833     }
2834     ierr = PetscFree(options);CHKERRQ(ierr);
2835   } else {
2836     if (viewer) { ierr = PetscViewerASCIIPrintf(viewer, "Using METIS to partition graph.\n");CHKERRQ(ierr); }
2837     Mat As;
2838     PetscInt numRows;
2839     PetscInt *partGlobal;
2840     ierr = MatCreateRedundantMatrix(A, size, MPI_COMM_NULL, MAT_INITIAL_MATRIX, &As);CHKERRQ(ierr);
2841 
2842     PetscInt *numExclusivelyOwnedAll;
2843     ierr = PetscMalloc1(size, &numExclusivelyOwnedAll);CHKERRQ(ierr);
2844     numExclusivelyOwnedAll[rank] = numExclusivelyOwned;
2845     ierr = MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,numExclusivelyOwnedAll,1,MPIU_INT,comm);CHKERRQ(ierr);
2846 
2847     ierr = MatGetSize(As, &numRows, NULL);CHKERRQ(ierr);
2848     ierr = PetscMalloc1(numRows, &partGlobal);CHKERRQ(ierr);
2849     if (!rank) {
2850       PetscInt *adjncy_g, *xadj_g, *vtxwgt_g;
2851       lenadjncy = 0;
2852 
2853       for (i=0; i<numRows; i++) {
2854         PetscInt temp=0;
2855         ierr = MatGetRow(As, i, &temp, NULL, NULL);CHKERRQ(ierr);
2856         lenadjncy += temp;
2857         ierr = MatRestoreRow(As, i, &temp, NULL, NULL);CHKERRQ(ierr);
2858       }
2859       ierr = PetscMalloc1(lenadjncy, &adjncy_g);CHKERRQ(ierr);
2860       lenxadj = 1 + numRows;
2861       ierr = PetscMalloc1(lenxadj, &xadj_g);CHKERRQ(ierr);
2862       xadj_g[0] = 0;
2863       counter = 0;
2864       for (i=0; i<numRows; i++) {
2865         PetscInt        temp=0;
2866         const PetscInt *cols;
2867         ierr = MatGetRow(As, i, &temp, &cols, NULL);CHKERRQ(ierr);
2868         ierr = PetscMemcpy(&adjncy_g[counter], cols, temp*sizeof(PetscInt));CHKERRQ(ierr);
2869         counter += temp;
2870         xadj_g[i+1] = counter;
2871         ierr = MatRestoreRow(As, i, &temp, &cols, NULL);CHKERRQ(ierr);
2872       }
2873       ierr = PetscMalloc1(2*numRows, &vtxwgt_g);CHKERRQ(ierr);
2874       for (i=0; i<size; i++){
2875         vtxwgt_g[ncon*cumSumVertices[i]] = numExclusivelyOwnedAll[i];
2876         if (ncon>1) vtxwgt_g[ncon*cumSumVertices[i]+1] = 1;
2877         for (j=cumSumVertices[i]+1; j<cumSumVertices[i+1]; j++) {
2878           vtxwgt_g[ncon*j] = 1;
2879           if (ncon>1) vtxwgt_g[2*j+1] = 0;
2880         }
2881       }
2882       ierr = PetscMalloc1(64, &options);CHKERRQ(ierr);
2883       ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
2884       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
2885       options[METIS_OPTION_CONTIG] = 1;
2886       PetscStackPush("METIS_PartGraphKway");
2887       ierr = METIS_PartGraphKway(&numRows, &ncon, xadj_g, adjncy_g, vtxwgt_g, NULL, NULL, &nparts, tpwgts, ubvec, options, &edgecut, partGlobal);
2888       PetscStackPop;
2889       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
2890       ierr = PetscFree(options);CHKERRQ(ierr);
2891       ierr = PetscFree(xadj_g);CHKERRQ(ierr);
2892       ierr = PetscFree(adjncy_g);CHKERRQ(ierr);
2893       ierr = PetscFree(vtxwgt_g);CHKERRQ(ierr);
2894     }
2895     ierr = PetscFree(numExclusivelyOwnedAll);CHKERRQ(ierr);
2896 
2897     /* Now scatter the parts array. */
2898     {
2899       PetscMPIInt *counts, *mpiCumSumVertices;
2900       ierr = PetscMalloc1(size, &counts);CHKERRQ(ierr);
2901       ierr = PetscMalloc1(size+1, &mpiCumSumVertices);CHKERRQ(ierr);
2902       for(i=0; i<size; i++) {
2903         ierr = PetscMPIIntCast(cumSumVertices[i+1] - cumSumVertices[i], &(counts[i]));CHKERRQ(ierr);
2904       }
2905       for(i=0; i<=size; i++) {
2906         ierr = PetscMPIIntCast(cumSumVertices[i], &(mpiCumSumVertices[i]));CHKERRQ(ierr);
2907       }
2908       ierr = MPI_Scatterv(partGlobal, counts, mpiCumSumVertices, MPIU_INT, part, counts[rank], MPIU_INT, 0, comm);CHKERRQ(ierr);
2909       ierr = PetscFree(counts);CHKERRQ(ierr);
2910       ierr = PetscFree(mpiCumSumVertices);CHKERRQ(ierr);
2911     }
2912 
2913     ierr = PetscFree(partGlobal);CHKERRQ(ierr);
2914     ierr = MatDestroy(&As);CHKERRQ(ierr);
2915   }
2916 
2917   ierr = MatDestroy(&A);CHKERRQ(ierr);
2918   ierr = PetscFree(ubvec);CHKERRQ(ierr);
2919   ierr = PetscFree(tpwgts);CHKERRQ(ierr);
2920 
2921   /* Now rename the result so that the vertex resembling the exclusively owned points stays on the same rank */
2922 
2923   ierr = PetscMalloc1(size, &firstVertices);CHKERRQ(ierr);
2924   ierr = PetscMalloc1(size, &renumbering);CHKERRQ(ierr);
2925   firstVertices[rank] = part[0];
2926   ierr = MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,firstVertices,1,MPIU_INT,comm);CHKERRQ(ierr);
2927   for (i=0; i<size; i++) {
2928     renumbering[firstVertices[i]] = i;
2929   }
2930   for (i=0; i<cumSumVertices[rank+1]-cumSumVertices[rank]; i++) {
2931     part[i] = renumbering[part[i]];
2932   }
2933   /* Check if the renumbering worked (this can fail when ParMETIS gives fewer partitions than there are processes) */
2934   failed = (PetscInt)(part[0] != rank);
2935   ierr = MPI_Allreduce(&failed, &failedGlobal, 1, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
2936 
2937   ierr = PetscFree(firstVertices);CHKERRQ(ierr);
2938   ierr = PetscFree(renumbering);CHKERRQ(ierr);
2939 
2940   if (failedGlobal > 0) {
2941     ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
2942     ierr = PetscFree(xadj);CHKERRQ(ierr);
2943     ierr = PetscFree(adjncy);CHKERRQ(ierr);
2944     ierr = PetscFree(vtxwgt);CHKERRQ(ierr);
2945     ierr = PetscFree(toBalance);CHKERRQ(ierr);
2946     ierr = PetscFree(isLeaf);CHKERRQ(ierr);
2947     ierr = PetscFree(isNonExclusivelyOwned);CHKERRQ(ierr);
2948     ierr = PetscFree(isExclusivelyOwned);CHKERRQ(ierr);
2949     ierr = PetscFree(part);CHKERRQ(ierr);
2950     if (viewer) {
2951       ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
2952       ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
2953     }
2954     ierr = PetscLogEventEnd(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
2955     PetscFunctionReturn(1);
2956   }
2957 
2958   /*Let's check how well we did distributing points*/
2959   if (viewer) {
2960     ierr = PetscViewerASCIIPrintf(viewer, "Comparing number of owned entities of depth %D on each process before rebalancing, after rebalancing, and after consistency checks.\n", entityDepth);CHKERRQ(ierr);
2961     ierr = PetscViewerASCIIPrintf(viewer, "Initial.     ");CHKERRQ(ierr);
2962     ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, NULL, viewer);CHKERRQ(ierr);
2963     ierr = PetscViewerASCIIPrintf(viewer, "Rebalanced.  ");CHKERRQ(ierr);
2964     ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, part, viewer);CHKERRQ(ierr);
2965   }
2966 
2967   /* Now check that every vertex is owned by a process that it is actually connected to. */
2968   for (i=1; i<=numNonExclusivelyOwned; i++) {
2969     PetscInt loc = 0;
2970     ierr = PetscFindInt(cumSumVertices[part[i]], xadj[i+1]-xadj[i], &adjncy[xadj[i]], &loc);CHKERRQ(ierr);
2971     /* If not, then just set the owner to the original owner (hopefully a rare event, it means that a vertex has been isolated) */
2972     if (loc<0) {
2973       part[i] = rank;
2974     }
2975   }
2976 
2977   /* Let's see how significant the influences of the previous fixing up step was.*/
2978   if (viewer) {
2979     ierr = PetscViewerASCIIPrintf(viewer, "After.       ");CHKERRQ(ierr);
2980     ierr = DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, part, viewer);CHKERRQ(ierr);
2981   }
2982 
2983   ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
2984   ierr = PetscFree(xadj);CHKERRQ(ierr);
2985   ierr = PetscFree(adjncy);CHKERRQ(ierr);
2986   ierr = PetscFree(vtxwgt);CHKERRQ(ierr);
2987 
2988   /* Almost done, now rewrite the SF to reflect the new ownership. */
2989   {
2990     PetscInt *pointsToRewrite;
2991     ierr = PetscMalloc1(numNonExclusivelyOwned, &pointsToRewrite);CHKERRQ(ierr);
2992     counter = 0;
2993     for(i=0; i<pEnd-pStart; i++) {
2994       if (toBalance[i]) {
2995         if (isNonExclusivelyOwned[i]) {
2996           pointsToRewrite[counter] = i + pStart;
2997           counter++;
2998         }
2999       }
3000     }
3001     ierr = DMPlexRewriteSF(dm, numNonExclusivelyOwned, pointsToRewrite, part+1, degrees);CHKERRQ(ierr);
3002     ierr = PetscFree(pointsToRewrite);CHKERRQ(ierr);
3003   }
3004 
3005   ierr = PetscFree(toBalance);CHKERRQ(ierr);
3006   ierr = PetscFree(isLeaf);CHKERRQ(ierr);
3007   ierr = PetscFree(isNonExclusivelyOwned);CHKERRQ(ierr);
3008   ierr = PetscFree(isExclusivelyOwned);CHKERRQ(ierr);
3009   ierr = PetscFree(part);CHKERRQ(ierr);
3010   if (viewer) {
3011     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
3012     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
3013   }
3014   ierr = PetscLogEventEnd(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);CHKERRQ(ierr);
3015 #else
3016   SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
3017 #endif
3018   PetscFunctionReturn(0);
3019 }
3020