#include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
#include <petsc/private/hashseti.h>

PetscClassId PETSCPARTITIONER_CLASSID = 0;

PetscFunctionList PetscPartitionerList              = NULL;
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;

PetscBool ChacoPartitionercite = PETSC_FALSE;
const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
                               "  author    = {Bruce Hendrickson and Robert Leland},\n"
                               "  title     = {A multilevel algorithm for partitioning graphs},\n"
                               "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},\n"
                               "  isbn      = {0-89791-816-9},\n"
                               "  pages     = {28},\n"
                               "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
                               "  publisher = {ACM Press},\n"
                               "  address   = {New York},\n"
                               "  year      = {1995}\n}\n";

PetscBool ParMetisPartitionercite = PETSC_FALSE;
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";

/*@C
  DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner

  Input Parameters:
+ dm      - The mesh DM
- height  - Height of the strata from which to construct the graph

  Output Parameters:
+ numVertices     - Number of vertices in the graph
. offsets         - Point offsets in the graph
. adjacency       - Point connectivity in the graph
- globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency". Negative indicates that the cell is a duplicate from another process.

  Notes:
  The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
  representation on the mesh.

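  Sample usage (a sketch with illustrative variable names; the CSR arrays would then be handed to a partitioner and freed by the caller):
.vb
    PetscInt numVertices, *offsets = NULL, *adjacency = NULL;
    IS       globalNumbering;

    DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &offsets, &adjacency, &globalNumbering);
    PetscFree(offsets);
    PetscFree(adjacency);
    ISDestroy(&globalNumbering);
.ve
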
  Level: developer

.seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMSetAdjacency()
@*/
PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
{
  PetscInt       dim, depth, p, pStart, pEnd, a, adjSize, idx, size;
  PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
  IS             cellNumbering;
  const PetscInt *cellNum;
  PetscBool      useCone, useClosure;
  PetscSection   section;
  PetscSegBuffer adjBuffer;
  PetscSF        sfPoint;
  PetscInt       *adjCells = NULL, *remoteCells = NULL;
  const PetscInt *local;
  PetscInt       nroots, nleaves, l;
  PetscMPIInt    rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  if (dim != depth) {
    /* We do not handle the uninterpolated case here */
    ierr = DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);CHKERRQ(ierr);
    /* DMPlexCreateNeighborCSR does not make a numbering */
    if (globalNumbering) {ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);CHKERRQ(ierr);}
    /* Different behavior for empty graphs */
    if (!*numVertices) {
      ierr = PetscMalloc1(1, offsets);CHKERRQ(ierr);
      (*offsets)[0] = 0;
    }
    /* Broken in parallel */
    if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported");
    PetscFunctionReturn(0);
  }
  ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
  /* Build adjacency graph via a section/segbuffer */
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
  /* Always use FVM adjacency to create partitioner graph */
  ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
  ierr = DMSetBasicAdjacency(dm, PETSC_TRUE, PETSC_FALSE);CHKERRQ(ierr);
  ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);CHKERRQ(ierr);
  if (globalNumbering) {
    ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
    *globalNumbering = cellNumbering;
  }
  ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  /* For all boundary faces (including faces adjacent to a ghost cell), record the local cell in adjCells
     Broadcast adjCells to remoteCells (to get cells from roots) and Reduce adjCells to remoteCells (to get cells from leaves)
   */
  ierr = PetscSFGetGraph(dm->sf, &nroots, &nleaves, &local, NULL);CHKERRQ(ierr);
  if (nroots >= 0) {
    PetscInt fStart, fEnd, f;

    ierr = PetscCalloc2(nroots, &adjCells, nroots, &remoteCells);CHKERRQ(ierr);
    ierr = DMPlexGetHeightStratum(dm, 1, &fStart, &fEnd);CHKERRQ(ierr);
    for (l = 0; l < nroots; ++l) adjCells[l] = -3;
    for (f = fStart; f < fEnd; ++f) {
      const PetscInt *support;
      PetscInt        supportSize;

      ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
      ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
      if (supportSize == 1) adjCells[f] = cellNum[support[0]];
      else if (supportSize == 2) {
        ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = cellNum[support[1]];
        ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = cellNum[support[0]];
      }
    }
    for (l = 0; l < nroots; ++l) remoteCells[l] = -1;
    ierr = PetscSFBcastBegin(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
  }
  /* Combine local and global adjacencies */
  for (*numVertices = 0, p = pStart; p < pEnd; p++) {
    const PetscInt *cone;
    PetscInt        coneSize, c;

    /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    /* Add remote cells */
    if (remoteCells) {
      ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
      ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
      for (c = 0; c < coneSize; ++c) {
        if (remoteCells[cone[c]] != -1) {
          PetscInt *PETSC_RESTRICT pBuf;

          ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
          ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
          *pBuf = remoteCells[cone[c]];
        }
      }
    }
    /* Add local cells */
    adjSize = PETSC_DETERMINE;
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      const PetscInt point = adj[a];
      if (point != p && pStart <= point && point < pEnd) {
        PetscInt *PETSC_RESTRICT pBuf;
        ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
        ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
        *pBuf = cellNum[point];
      }
    }
    (*numVertices)++;
  }
  ierr = PetscFree2(adjCells, remoteCells);CHKERRQ(ierr);
  ierr = DMSetBasicAdjacency(dm, useCone, useClosure);CHKERRQ(ierr);
  /* Derive CSR graph from section/segbuffer */
  ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
  for (idx = 0, p = pStart; p < pEnd; p++) {
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
  }
  vOffsets[*numVertices] = size;
  if (offsets) *offsets = vOffsets;
  ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
  ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
  if (adjacency) *adjacency = graph;
  /* Clean up */
  ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
  ierr = PetscFree(adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
  DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.

  Collective

  Input Parameters:
+ dm - The DMPlex
- cellHeight - The height of mesh points to treat as cells (default should be 0)

  Output Parameters:
+ numVertices - The number of local vertices in the graph, or cells in the mesh
. offsets     - The offset to the adjacency list for each cell
- adjacency   - The adjacency list for all cells

  Note: This is suitable for input to a mesh partitioner like ParMetis.

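  Sample usage (a sketch; the neighbors of cell c then sit in adjacency[offsets[c]] through adjacency[offsets[c+1]-1]):
.vb
    PetscInt numCells, *offsets = NULL, *adjacency = NULL;

    DMPlexCreateNeighborCSR(dm, 0, &numCells, &offsets, &adjacency);
    PetscFree(offsets);
    PetscFree(adjacency);
.ve
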
  Level: advanced

.seealso: DMPlexCreate()
@*/
PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
{
  const PetscInt maxFaceCases = 30;
  PetscInt       numFaceCases = 0;
  PetscInt       numFaceVertices[30]; /* maxFaceCases; C89 requires a constant array bound */
  PetscInt      *off, *adj;
  PetscInt      *neighborCells = NULL;
  PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* For parallel partitioning, supports would have to be communicated */
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  cellDim = dim - cellHeight;
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
  if (cEnd - cStart == 0) {
    if (numVertices) *numVertices = 0;
    if (offsets)   *offsets   = NULL;
    if (adjacency) *adjacency = NULL;
    PetscFunctionReturn(0);
  }
  numCells  = cEnd - cStart;
  faceDepth = depth - cellHeight;
  if (dim == depth) {
    PetscInt f, fStart, fEnd;

    ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
    /* Count neighboring cells */
    ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
    for (f = fStart; f < fEnd; ++f) {
      const PetscInt *support;
      PetscInt        supportSize;
      ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
      ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
      if (supportSize == 2) {
        PetscInt numChildren;

        ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
        if (!numChildren) {
          ++off[support[0]-cStart+1];
          ++off[support[1]-cStart+1];
        }
      }
    }
    /* Prefix sum */
    for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
    if (adjacency) {
      PetscInt *tmp;

      ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
      ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
      ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
      /* Get neighboring cells */
      for (f = fStart; f < fEnd; ++f) {
        const PetscInt *support;
        PetscInt        supportSize;
        ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
        ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
        if (supportSize == 2) {
          PetscInt numChildren;

          ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
          if (!numChildren) {
            adj[tmp[support[0]-cStart]++] = support[1];
            adj[tmp[support[1]-cStart]++] = support[0];
          }
        }
      }
#if defined(PETSC_USE_DEBUG)
      for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %D != %D for cell %D", tmp[c], off[c+1], c+cStart);
#endif
      ierr = PetscFree(tmp);CHKERRQ(ierr);
    }
    if (numVertices) *numVertices = numCells;
    if (offsets)   *offsets   = off;
    if (adjacency) *adjacency = adj;
    PetscFunctionReturn(0);
  }
  /* Setup face recognition */
  if (faceDepth == 1) {
    PetscInt cornersSeen[30] = {0}; /* maxFaceCases; could use PetscBT */

    for (c = cStart; c < cEnd; ++c) {
      PetscInt corners;

      ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
      if (!cornersSeen[corners]) {
        PetscInt nFV;

        if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
        cornersSeen[corners] = 1;

        ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);

        numFaceVertices[numFaceCases++] = nFV;
      }
    }
  }
  ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
  /* Count neighboring cells */
  for (cell = cStart; cell < cEnd; ++cell) {
    PetscInt numNeighbors = PETSC_DETERMINE, n;

    ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
    /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
    for (n = 0; n < numNeighbors; ++n) {
      PetscInt        cellPair[2];
      PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
      PetscInt        meetSize = 0;
      const PetscInt *meet    = NULL;

      cellPair[0] = cell; cellPair[1] = neighborCells[n];
      if (cellPair[0] == cellPair[1]) continue;
      if (!found) {
        ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
        if (meetSize) {
          PetscInt f;

          for (f = 0; f < numFaceCases; ++f) {
            if (numFaceVertices[f] == meetSize) {
              found = PETSC_TRUE;
              break;
            }
          }
        }
        ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
      }
      if (found) ++off[cell-cStart+1];
    }
  }
  /* Prefix sum */
  for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];

  if (adjacency) {
    ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
    /* Get neighboring cells */
    for (cell = cStart; cell < cEnd; ++cell) {
      PetscInt numNeighbors = PETSC_DETERMINE, n;
      PetscInt cellOffset   = 0;

      ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
      /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
      for (n = 0; n < numNeighbors; ++n) {
        PetscInt        cellPair[2];
        PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
        PetscInt        meetSize = 0;
        const PetscInt *meet    = NULL;

        cellPair[0] = cell; cellPair[1] = neighborCells[n];
        if (cellPair[0] == cellPair[1]) continue;
        if (!found) {
          ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
          if (meetSize) {
            PetscInt f;

            for (f = 0; f < numFaceCases; ++f) {
              if (numFaceVertices[f] == meetSize) {
                found = PETSC_TRUE;
                break;
              }
            }
          }
          ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
        }
        if (found) {
          adj[off[cell-cStart]+cellOffset] = neighborCells[n];
          ++cellOffset;
        }
      }
    }
  }
  ierr = PetscFree(neighborCells);CHKERRQ(ierr);
  if (numVertices) *numVertices = numCells;
  if (offsets)   *offsets   = off;
  if (adjacency) *adjacency = adj;
  PetscFunctionReturn(0);
}

/*@C
  PetscPartitionerRegister - Adds a new PetscPartitioner implementation

  Not Collective

  Input Parameters:
+ name        - The name of a new user-defined creation routine
- create_func - The creation routine itself

  Notes:
  PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners

  Sample usage:
.vb
    PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
.ve

  Then, your PetscPartitioner type can be chosen with the procedural interface via
.vb
    PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
    PetscPartitionerSetType(PetscPartitioner, "my_part");
.ve
   or at runtime via the option
.vb
    -petscpartitioner_type my_part
.ve

  Level: advanced

.keywords: PetscPartitioner, register
.seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
@*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
  PetscPartitionerSetType - Builds a particular PetscPartitioner

  Collective on PetscPartitioner

  Input Parameters:
+ part - The PetscPartitioner object
- name - The kind of partitioner

  Options Database Key:
. -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types

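  Sample usage (a sketch; the type name may instead come from the options database):
.vb
    PetscPartitionerSetType(part, PETSCPARTITIONERPARMETIS);
.ve
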
  Level: intermediate

.keywords: PetscPartitioner, set, type
.seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
{
  PetscErrorCode (*r)(PetscPartitioner);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);

  if (part->ops->destroy) {
    ierr = (*part->ops->destroy)(part);CHKERRQ(ierr);
  }
  ierr = PetscMemzero(part->ops, sizeof(struct _PetscPartitionerOps));CHKERRQ(ierr);
  ierr = (*r)(part);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
  PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.

  Not Collective

  Input Parameter:
. part - The PetscPartitioner

  Output Parameter:
. name - The PetscPartitioner type name

  Level: intermediate

.keywords: PetscPartitioner, get, type, name
.seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}

/*@C
  PetscPartitionerView - Views a PetscPartitioner

  Collective on PetscPartitioner

  Input Parameters:
+ part - the PetscPartitioner object to view
- v    - the viewer

  Level: developer

.seealso: PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
{
  PetscMPIInt    size;
  PetscBool      isascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
  ierr = PetscObjectTypeCompare((PetscObject) v, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
  if (isascii) {
    ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v, "Graph Partitioner: %d MPI Process%s\n", size, size > 1 ? "es" : "");CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v, "  type: %s\n", part->hdr.type_name);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPushTab(v);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v, "edge cut: %D\n", part->edgeCut);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v, "balance:  %.2g\n", part->balance);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPopTab(v);CHKERRQ(ierr);
  }
  if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType)
{
  PetscFunctionBegin;
  if (!currentType) {
#if defined(PETSC_HAVE_CHACO)
    *defaultType = PETSCPARTITIONERCHACO;
#elif defined(PETSC_HAVE_PARMETIS)
    *defaultType = PETSCPARTITIONERPARMETIS;
#elif defined(PETSC_HAVE_PTSCOTCH)
    *defaultType = PETSCPARTITIONERPTSCOTCH;
#else
    *defaultType = PETSCPARTITIONERSIMPLE;
#endif
  } else {
    *defaultType = currentType;
  }
  PetscFunctionReturn(0);
}

/*@
  PetscPartitionerSetFromOptions - Sets parameters in a PetscPartitioner from the options database

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to set options for

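  Sample usage (a sketch; the type can then be chosen at runtime via -petscpartitioner_type):
.vb
    PetscPartitioner part;

    PetscPartitionerCreate(comm, &part);
    PetscPartitionerSetFromOptions(part);
.ve
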
  Level: developer

.seealso: PetscPartitionerView()
@*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  const char    *defaultType;
  char           name[256];
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  ierr = PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);CHKERRQ(ierr);
  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, sizeof(name), &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
  } else if (!((PetscObject) part)->type_name) {
    ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
  }
  if (part->ops->setfromoptions) {
    ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);
  }
  ierr = PetscViewerDestroy(&part->viewerGraph);CHKERRQ(ierr);
  ierr = PetscOptionsGetViewer(((PetscObject) part)->comm, ((PetscObject) part)->options, ((PetscObject) part)->prefix, "-petscpartitioner_view_graph", &part->viewerGraph, &part->formatGraph, &part->viewGraph);CHKERRQ(ierr);
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
  PetscPartitionerSetUp - Construct data structures for the PetscPartitioner

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to set up

  Level: developer

.seealso: PetscPartitionerView(), PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

/*@
  PetscPartitionerDestroy - Destroys a PetscPartitioner object

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to destroy

  Level: developer

.seealso: PetscPartitionerView()
@*/
PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*part) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);

  if (--((PetscObject)(*part))->refct > 0) {*part = NULL; PetscFunctionReturn(0);}
  ((PetscObject) (*part))->refct = 0;

  ierr = PetscViewerDestroy(&(*part)->viewerGraph);CHKERRQ(ierr);
  if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
  ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
  PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the PetscPartitioner object

  Output Parameter:
. part - The PetscPartitioner object

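  Sample usage (a sketch of the typical lifecycle; dm, partSection, and partition are assumed to exist):
.vb
    PetscPartitioner part;

    PetscPartitionerCreate(comm, &part);
    PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE);
    PetscPartitionerPartition(part, dm, partSection, &partition);
    PetscPartitionerDestroy(&part);
.ve
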
  Level: beginner

.seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
@*/
PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
{
  PetscPartitioner p;
  const char       *partitionerType = NULL;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidPointer(part, 2);
  *part = NULL;
  ierr = DMInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
  ierr = PetscPartitionerGetDefaultType(NULL,&partitionerType);CHKERRQ(ierr);
  ierr = PetscPartitionerSetType(p,partitionerType);CHKERRQ(ierr);

  p->edgeCut = 0;
  p->balance = 0.0;

  *part = p;
  PetscFunctionReturn(0);
}

/*@
  PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh

  Collective on DM

  Input Parameters:
+ part    - The PetscPartitioner
- dm      - The mesh DM

  Output Parameters:
+ partSection     - The PetscSection giving the division of points by partition
- partition       - The list of points by partition

  Options Database Key:
. -petscpartitioner_view_graph - View the graph we are partitioning

  Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()

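  Sample usage (a sketch; part and dm are assumed to exist, and the partition would be used before being destroyed):
.vb
    PetscSection partSection;
    IS           partition;

    PetscSectionCreate(PetscObjectComm((PetscObject) dm), &partSection);
    PetscPartitionerPartition(part, dm, partSection, &partition);
    ISDestroy(&partition);
    PetscSectionDestroy(&partSection);
.ve
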
  Level: developer

.seealso: DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
{
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
  PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 3);
  PetscValidPointer(partition, 4);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
  if (size == 1) {
    PetscInt *points;
    PetscInt  cStart, cEnd, c;

    ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
    ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
    for (c = cStart; c < cEnd; ++c) points[c] = c;
    ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  if (part->height == 0) {
    PetscInt numVertices;
    PetscInt *start     = NULL;
    PetscInt *adjacency = NULL;
    IS       globalNumbering;

    ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
    if (part->viewGraph) {
      PetscViewer viewer = part->viewerGraph;
      PetscBool   isascii;
      PetscInt    v, i;
      PetscMPIInt rank;

      ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) viewer), &rank);CHKERRQ(ierr);
      ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
      if (isascii) {
        ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]Nv: %D\n", rank, numVertices);CHKERRQ(ierr);
        for (v = 0; v < numVertices; ++v) {
          const PetscInt s = start[v];
          const PetscInt e = start[v+1];

          ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]  ", rank);CHKERRQ(ierr);
          for (i = s; i < e; ++i) {ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%D ", adjacency[i]);CHKERRQ(ierr);}
          ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%D-%D)\n", s, e);CHKERRQ(ierr);
        }
        ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
      }
    }
    if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
    ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
    ierr = PetscFree(start);CHKERRQ(ierr);
    ierr = PetscFree(adjacency);CHKERRQ(ierr);
    if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
      const PetscInt *globalNum;
      const PetscInt *partIdx;
      PetscInt *map, cStart, cEnd;
      PetscInt *adjusted, i, localSize, offset;
      IS    newPartition;

      ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
      ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
      ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
      ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
      ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
      ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
      for (i = cStart, offset = 0; i < cEnd; i++) {
        if (globalNum[i - cStart] >= 0) map[offset++] = i;
      }
      for (i = 0; i < localSize; i++) {
        adjusted[i] = map[partIdx[i]];
      }
      ierr = PetscFree(map);CHKERRQ(ierr);
      ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
      ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
      ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
      ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
      ierr = ISDestroy(partition);CHKERRQ(ierr);
      *partition = newPartition;
    }
  } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
  ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  if (p->random) {
    ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
    ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
  if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner Shell Options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscInt                np;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  if (p->random) {
    PetscRandom r;
    PetscInt   *sizes, *points, v, pr; /* pr renamed from p to avoid shadowing the Shell context */
    PetscMPIInt rank;

    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
    ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
    ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr);
    ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
    ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
    for (v = 0; v < numVertices; ++v) {points[v] = v;}
    for (pr = 0; pr < nparts; ++pr) {sizes[pr] = numVertices/nparts + (PetscInt) (pr < numVertices % nparts);}
    /* Fisher-Yates shuffle of the vertex numbers */
    for (v = numVertices-1; v > 0; --v) {
      PetscReal val;
      PetscInt  w, tmp;

      ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));CHKERRQ(ierr);
      ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
      w    = PetscFloorReal(val);
      tmp       = points[v];
      points[v] = points[w];
      points[w] = tmp;
    }
    ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
    ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
    ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
  }
  if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
  ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
  if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %D != configured partitions %D", nparts, np);
  ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
  if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %D != configured vertices %D", numVertices, np);
  ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
  *partition = p->partition;
  ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view           = PetscPartitionerView_Shell;
  part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
  part->ops->destroy        = PetscPartitionerDestroy_Shell;
  part->ops->partition      = PetscPartitionerPartition_Shell;
  PetscFunctionReturn(0);
}

/*MC
  PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object using a partition provided by the user with PetscPartitionerShellSetPartition()

  Level: intermediate

.seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
M*/

PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
{
  PetscPartitioner_Shell *p;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
  part->data = p;

  ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
  p->random = PETSC_FALSE;
  PetscFunctionReturn(0);
}

/*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh

  Collective on Part

  Input Parameters:
+ part   - The PetscPartitioner
. size   - The number of partitions
. sizes  - array of length size (or NULL) providing the number of points in each partition
- points - array of length sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)

  Notes:
  It is safe to free the sizes and points arrays after use in this routine.

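  Sample usage for two partitions of a four-cell mesh (a sketch; the arrays are illustrative):
.vb
    PetscInt sizes[2]  = {2, 2};
    PetscInt points[4] = {0, 1, 2, 3};

    PetscPartitionerShellSetPartition(part, 2, sizes, points);
.ve
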
  Level: developer

.seealso: DMPlexDistribute(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscInt                proc, numPoints;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  if (sizes)  {PetscValidPointer(sizes, 3);}
  if (points) {PetscValidPointer(points, 4);}
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
  if (sizes) {
    for (proc = 0; proc < size; ++proc) {
      ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
    }
  }
  ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
  PetscPartitionerShellSetRandom - Set the flag to use a random partition

  Collective on Part

  Input Parameters:
+ part   - The PetscPartitioner
- random - The flag to use a random partition

  Level: intermediate

.seealso: PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  p->random = random;
  PetscFunctionReturn(0);
}

/*@
  PetscPartitionerShellGetRandom - Get the flag to use a random partition

  Not Collective

  Input Parameter:
. part   - The PetscPartitioner

  Output Parameter:
. random - The flag to use a random partition

  Level: intermediate

.seealso: PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(random, 2);
  *random = p->random;
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
{
  PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
  PetscErrorCode           ierr;

  PetscFunctionBegin;
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
  if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)part);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  } else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Gather the local vertex counts and turn them into global offsets */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      PetscInt rem = nvGlobal % nparts;    /* the first rem parts get one extra vertex */
      PetscInt pSmall = nvGlobal/nparts;   /* size of the small parts */
      PetscInt pBig = nvGlobal/nparts + 1; /* size of the large parts */

      /* Locate the parts containing my first and last global vertices */
      if (rem) {
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        } else {
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        } else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      } else {
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

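      /* Worked example (illustrative): nvGlobal = 10, nparts = 4 gives rem = 2, pSmall = 2,
         pBig = 3, so the part sizes are {3, 3, 2, 2}; a rank owning global vertices 4..6
         then computes firstPart = 1 and lastPart = 2. */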
      for (np = firstPart; np <= lastPart; np++) {
        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view      = PetscPartitionerView_Simple;
  part->ops->destroy   = PetscPartitionerDestroy_Simple;
  part->ops->partition = PetscPartitionerPartition_Simple;
  PetscFunctionReturn(0);
}

/*MC
  PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object that divides the cells into contiguous, nearly equal chunks

  Level: intermediate

.seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
M*/

PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
{
  PetscPartitioner_Simple *p;
  PetscErrorCode           ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
  part->data = p;

  ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
{
  PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
  PetscErrorCode           ierr;

  PetscFunctionBegin;
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
  if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  PetscInt       np;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Assign all local vertices to partition 0 */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
  for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view      = PetscPartitionerView_Gather;
  part->ops->destroy   = PetscPartitionerDestroy_Gather;
  part->ops->partition = PetscPartitionerPartition_Gather;
  PetscFunctionReturn(0);
}

/*MC
  PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object that gathers all cells onto process 0

  Level: intermediate

.seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
M*/

PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
{
  PetscPartitioner_Gather *p;
  PetscErrorCode           ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
  part->data = p;

  ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
{
  PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
  if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

#if defined(PETSC_HAVE_CHACO)
#if defined(PETSC_HAVE_UNISTD_H)
#include <unistd.h>
#endif
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
#include <chaco.h>
#else
/* Older versions of Chaco do not have an include file */
PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
                       float *ewgts, float *x, float *y, float *z, char *outassignname,
                       char *outfilename, short *assignment, int architecture, int ndims_tot,
                       int mesh_dims[3], double *goal, int global_method, int local_method,
                       int rqi_flag, int vmax, int ndims, double eigtol, long seed);
#endif
extern int FREE_GRAPH;
#endif

1267 static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1268 {
1269 #if defined(PETSC_HAVE_CHACO)
1270   enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
1271   MPI_Comm       comm;
1272   int            nvtxs          = numVertices; /* number of vertices in full graph */
1273   int           *vwgts          = NULL;   /* weights for all vertices */
1274   float         *ewgts          = NULL;   /* weights for all edges */
1275   float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
1276   char          *outassignname  = NULL;   /*  name of assignment output file */
1277   char          *outfilename    = NULL;   /* output file name */
1278   int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
1279   int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
1280   int            mesh_dims[3];            /* dimensions of mesh of processors */
1281   double        *goal          = NULL;    /* desired set sizes for each set */
1282   int            global_method = 1;       /* global partitioning algorithm */
1283   int            local_method  = 1;       /* local partitioning algorithm */
1284   int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
1285   int            vmax          = 200;     /* how many vertices to coarsen down to? */
1286   int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
1287   double         eigtol        = 0.001;   /* tolerance on eigenvectors */
1288   long           seed          = 123636512; /* for random graph mutations */
1289 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1290   int           *assignment;              /* Output partition */
1291 #else
1292   short int     *assignment;              /* Output partition */
1293 #endif
1294   int            fd_stdout, fd_pipe[2];
1295   PetscInt      *points;
1296   int            i, v, p;
1297   PetscErrorCode ierr;
1298 
1299   PetscFunctionBegin;
1300   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1301 #if defined (PETSC_USE_DEBUG)
1302   {
1303     int ival,isum;
1304     PetscBool distributed;
1305 
1306     ival = (numVertices > 0);
1307     ierr = MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);CHKERRQ(ierr);
1308     distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
1309     if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
1310   }
1311 #endif
1312   if (!numVertices) {
1313     ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1314     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1315     ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1316     PetscFunctionReturn(0);
1317   }
1318   FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
1319   for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];
1320 
1321   if (global_method == INERTIAL_METHOD) {
1322     /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
1323     SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
1324   }
1325   mesh_dims[0] = nparts;
1326   mesh_dims[1] = 1;
1327   mesh_dims[2] = 1;
1328   ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
1329   /* Chaco outputs to stdout. We redirect this to a buffer. */
1330   /* TODO: check error codes for UNIX calls */
1331 #if defined(PETSC_HAVE_UNISTD_H)
1332   {
1333     int piperet;
1334     piperet = pipe(fd_pipe);
1335     if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
1336     fd_stdout = dup(1);
1337     close(1);
1338     dup2(fd_pipe[1], 1);
1339   }
1340 #endif
1341   ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
1342                    assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
1343                    vmax, ndims, eigtol, seed);
1344 #if defined(PETSC_HAVE_UNISTD_H)
1345   {
1346     char msgLog[10000];
1347     int  count;
1348 
1349     fflush(stdout);
1350     count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
1351     if (count < 0) count = 0;
1352     msgLog[count] = 0;
1353     close(1);
1354     dup2(fd_stdout, 1);
1355     close(fd_stdout);
1356     close(fd_pipe[0]);
1357     close(fd_pipe[1]);
1358     if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
1359   }
1360 #else
1361   if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
1362 #endif
1363   /* Convert to PetscSection+IS */
1364   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1365   for (v = 0; v < nvtxs; ++v) {
1366     ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
1367   }
1368   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1369   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1370   for (p = 0, i = 0; p < nparts; ++p) {
1371     for (v = 0; v < nvtxs; ++v) {
1372       if (assignment[v] == p) points[i++] = v;
1373     }
1374   }
1375   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1376   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1377   if (global_method == INERTIAL_METHOD) {
1378     /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
1379   }
1380   ierr = PetscFree(assignment);CHKERRQ(ierr);
1381   for (i = 0; i < start[numVertices]; ++i) --adjacency[i]; /* restore the 0-based indices */
1382   PetscFunctionReturn(0);
1383 #else
1384   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
1385 #endif
1386 }
1387 
1388 static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1389 {
1390   PetscFunctionBegin;
1391   part->ops->view      = PetscPartitionerView_Chaco;
1392   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1393   part->ops->partition = PetscPartitionerPartition_Chaco;
1394   PetscFunctionReturn(0);
1395 }
1396 
1397 /*MC
1398   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1399 
1400   Level: intermediate
1401 
1402 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1403 M*/
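/* A minimal usage sketch (illustrative, not part of the library): select this
   partitioner on an existing DMPlex "dm" (assumed to be created elsewhere),

     PetscPartitioner part;
     ierr = DMPlexGetPartitioner(dm, &part);CHKERRQ(ierr);
     ierr = PetscPartitionerSetType(part, PETSCPARTITIONERCHACO);CHKERRQ(ierr);

   or equivalently from the command line with -petscpartitioner_type chaco. */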
1404 
1405 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1406 {
1407   PetscPartitioner_Chaco *p;
1408   PetscErrorCode          ierr;
1409 
1410   PetscFunctionBegin;
1411   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1412   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1413   part->data = p;
1414 
1415   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1416   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1417   PetscFunctionReturn(0);
1418 }
1419 
1420 static const char *ptypes[] = {"kway", "rb"};
1421 
1422 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1423 {
1424   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1425   PetscErrorCode             ierr;
1426 
1427   PetscFunctionBegin;
1428   ierr = PetscFree(p);CHKERRQ(ierr);
1429   PetscFunctionReturn(0);
1430 }
1431 
1432 static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1433 {
1434   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1435   PetscErrorCode             ierr;
1436 
1437   PetscFunctionBegin;
1438   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1439   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis type: %s\n", ptypes[p->ptype]);CHKERRQ(ierr);
1440   ierr = PetscViewerASCIIPrintf(viewer, "load imbalance ratio %g\n", (double) p->imbalanceRatio);CHKERRQ(ierr);
1441   ierr = PetscViewerASCIIPrintf(viewer, "debug flag %D\n", p->debugFlag);CHKERRQ(ierr);
1442   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1443   PetscFunctionReturn(0);
1444 }
1445 
1446 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1447 {
1448   PetscBool      iascii;
1449   PetscErrorCode ierr;
1450 
1451   PetscFunctionBegin;
1452   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1453   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1454   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1455   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1456   PetscFunctionReturn(0);
1457 }
1458 
1459 static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1460 {
1461   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1462   PetscErrorCode            ierr;
1463 
1464   PetscFunctionBegin;
1465   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");CHKERRQ(ierr);
1466   ierr = PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);CHKERRQ(ierr);
1467   ierr = PetscOptionsReal("-petscpartitioner_parmetis_imbalance_ratio", "Load imbalance ratio limit", "", p->imbalanceRatio, &p->imbalanceRatio, NULL);CHKERRQ(ierr);
1468   ierr = PetscOptionsInt("-petscpartitioner_parmetis_debug", "Debugging flag", "", p->debugFlag, &p->debugFlag, NULL);CHKERRQ(ierr);
1469   ierr = PetscOptionsTail();CHKERRQ(ierr);
1470   PetscFunctionReturn(0);
1471 }
1472 
1473 #if defined(PETSC_HAVE_PARMETIS)
1474 #include <parmetis.h>
1475 #endif
1476 
1477 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1478 {
1479 #if defined(PETSC_HAVE_PARMETIS)
1480   PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *) part->data;
1481   MPI_Comm       comm;
1482   PetscSection   section;
1483   PetscInt       nvtxs       = numVertices; /* The number of vertices in full graph */
1484   PetscInt      *vtxdist;                   /* Distribution of vertices across processes */
1485   PetscInt      *xadj        = start;       /* Start of edge list for each vertex */
1486   PetscInt      *adjncy      = adjacency;   /* Edge lists for all vertices */
1487   PetscInt      *vwgt        = NULL;        /* Vertex weights */
1488   PetscInt      *adjwgt      = NULL;        /* Edge weights */
1489   PetscInt       wgtflag     = 0;           /* Indicates which weights are present */
1490   PetscInt       numflag     = 0;           /* Indicates initial offset (0 or 1) */
1491   PetscInt       ncon        = 1;           /* The number of weights per vertex */
1492   PetscInt       metis_ptype = pm->ptype;   /* kway or recursive bisection */
1493   real_t        *tpwgts;                    /* The fraction of vertex weights assigned to each partition */
1494   real_t        *ubvec;                     /* The balance intolerance for vertex weights */
1495   PetscInt       options[64];               /* Options (length must be at least METIS_NOPTIONS) */
1496   /* Outputs */
1497   PetscInt       v, i, *assignment, *points;
1498   PetscMPIInt    size, rank, p;
1499   PetscErrorCode ierr;
1500 
1501   PetscFunctionBegin;
1502   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1503   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1504   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1505   /* Calculate vertex distribution */
1506   ierr = PetscMalloc5(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
1507   vtxdist[0] = 0;
1508   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1509   for (p = 2; p <= size; ++p) {
1510     vtxdist[p] += vtxdist[p-1];
1511   }
1512   /* Calculate partition weights */
1513   for (p = 0; p < nparts; ++p) {
1514     tpwgts[p] = 1.0/nparts;
1515   }
1516   ubvec[0] = pm->imbalanceRatio;
1517   /* Weight cells by dofs on cell by default */
1518   ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
1519   if (section) {
1520     PetscInt cStart, cEnd, dof;
1521 
1522     ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
1523     for (v = cStart; v < cEnd; ++v) {
1524       ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1525       /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1526       /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1527       if (v-cStart < numVertices) vwgt[v-cStart] = PetscMax(dof, 1);
1528     }
1529   } else {
1530     for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1531   }
1532   wgtflag |= 2; /* have weights on graph vertices */
1533 
1534   if (nparts == 1) {
1535     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1536   } else {
1537     for (p = 0; p < size && !vtxdist[p+1]; ++p); /* find the first rank with vertices; test p < size before reading vtxdist[p+1] */
1538     if (vtxdist[p+1] == vtxdist[size]) {
1539       if (rank == p) {
1540         ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
1541         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
1542         if (metis_ptype == 1) {
1543           PetscStackPush("METIS_PartGraphRecursive");
1544           ierr = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1545           PetscStackPop;
1546           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
1547         } else {
1548           /*
1549            It would be nice to activate the two options below, but they would need some actual testing.
1550            - Turning on these options may exercise paths of the METIS code that have bugs and may break production runs.
1551            - If CONTIG is set to 1, METIS will exit with an error if the graph is disconnected, despite the manual saying the option is ignored in that case.
1552           */
1553           /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
1554           /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
1555           PetscStackPush("METIS_PartGraphKway");
1556           ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1557           PetscStackPop;
1558           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1559         }
1560       }
1561     } else {
1562       options[0] = 1;
1563       options[1] = pm->debugFlag;
1564       PetscStackPush("ParMETIS_V3_PartKway");
1565       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &comm);
1566       PetscStackPop;
1567       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
1568     }
1569   }
1570   /* Convert to PetscSection+IS */
1571   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1572   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1573   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1574   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1575   for (p = 0, i = 0; p < nparts; ++p) {
1576     for (v = 0; v < nvtxs; ++v) {
1577       if (assignment[v] == p) points[i++] = v;
1578     }
1579   }
1580   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1581   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1582   ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
1583   PetscFunctionReturn(0);
1584 #else
1585   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1586 #endif
1587 }
1588 
1589 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1590 {
1591   PetscFunctionBegin;
1592   part->ops->view           = PetscPartitionerView_ParMetis;
1593   part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
1594   part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
1595   part->ops->partition      = PetscPartitionerPartition_ParMetis;
1596   PetscFunctionReturn(0);
1597 }
1598 
1599 /*MC
1600   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1601 
1602   Level: intermediate
1603 
1604 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1605 M*/
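/* A minimal usage sketch (illustrative, not part of the library): select and tune
   this partitioner from the command line, using the options registered in
   PetscPartitionerSetFromOptions_ParMetis() above:

     -petscpartitioner_type parmetis
     -petscpartitioner_parmetis_type rb
     -petscpartitioner_parmetis_imbalance_ratio 1.1
     -petscpartitioner_parmetis_debug 1
*/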
1606 
1607 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1608 {
1609   PetscPartitioner_ParMetis *p;
1610   PetscErrorCode          ierr;
1611 
1612   PetscFunctionBegin;
1613   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1614   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1615   part->data = p;
1616 
1617   p->ptype          = 0;
1618   p->imbalanceRatio = 1.05;
1619   p->debugFlag      = 0;
1620 
1621   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1622   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1623   PetscFunctionReturn(0);
1624 }
1625 
1626 
1627 PetscBool PTScotchPartitionercite = PETSC_FALSE;
1628 const char PTScotchPartitionerCitation[] =
1629   "@article{PTSCOTCH,\n"
1630   "  author  = {C. Chevalier and F. Pellegrini},\n"
1631   "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
1632   "  journal = {Parallel Computing},\n"
1633   "  volume  = {34},\n"
1634   "  number  = {6},\n"
1635   "  pages   = {318--331},\n"
1636   "  year    = {2008},\n"
1637   "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
1638   "}\n";
1639 
1640 typedef struct {
1641   PetscInt  strategy;
1642   PetscReal imbalance;
1643 } PetscPartitioner_PTScotch;
1644 
1645 static const char *const
1646 PTScotchStrategyList[] = {
1647   "DEFAULT",
1648   "QUALITY",
1649   "SPEED",
1650   "BALANCE",
1651   "SAFETY",
1652   "SCALABILITY",
1653   "RECURSIVE",
1654   "REMAP"
1655 };
1656 
1657 #if defined(PETSC_HAVE_PTSCOTCH)
1658 
1659 EXTERN_C_BEGIN
1660 #include <ptscotch.h>
1661 EXTERN_C_END
1662 
1663 #define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0)
1664 
1665 static int PTScotch_Strategy(PetscInt strategy)
1666 {
1667   switch (strategy) {
1668   case  0: return SCOTCH_STRATDEFAULT;
1669   case  1: return SCOTCH_STRATQUALITY;
1670   case  2: return SCOTCH_STRATSPEED;
1671   case  3: return SCOTCH_STRATBALANCE;
1672   case  4: return SCOTCH_STRATSAFETY;
1673   case  5: return SCOTCH_STRATSCALABILITY;
1674   case  6: return SCOTCH_STRATRECURSIVE;
1675   case  7: return SCOTCH_STRATREMAP;
1676   default: return SCOTCH_STRATDEFAULT;
1677   }
1678 }
1679 
1680 
1681 static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1682                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[])
1683 {
1684   SCOTCH_Graph   grafdat;
1685   SCOTCH_Strat   stradat;
1686   SCOTCH_Num     vertnbr = n;
1687   SCOTCH_Num     edgenbr = xadj[n];
1688   SCOTCH_Num*    velotab = vtxwgt;
1689   SCOTCH_Num*    edlotab = adjwgt;
1690   SCOTCH_Num     flagval = strategy;
1691   double         kbalval = imbalance;
1692   PetscErrorCode ierr;
1693 
1694   PetscFunctionBegin;
1695   {
1696     PetscBool flg = PETSC_TRUE;
1697     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartitioner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
1698     if (!flg) velotab = NULL;
1699   }
1700   ierr = SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr);
1701   ierr = SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
1702   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1703   ierr = SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
1704 #if defined(PETSC_USE_DEBUG)
1705   ierr = SCOTCH_graphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1706 #endif
1707   ierr = SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
1708   SCOTCH_stratExit(&stradat);
1709   SCOTCH_graphExit(&grafdat);
1710   PetscFunctionReturn(0);
1711 }
1712 
1713 static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1714                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[], MPI_Comm comm)
1715 {
1716   PetscMPIInt     procglbnbr;
1717   PetscMPIInt     proclocnum;
1718   SCOTCH_Arch     archdat;
1719   SCOTCH_Dgraph   grafdat;
1720   SCOTCH_Dmapping mappdat;
1721   SCOTCH_Strat    stradat;
1722   SCOTCH_Num      vertlocnbr;
1723   SCOTCH_Num      edgelocnbr;
1724   SCOTCH_Num*     veloloctab = vtxwgt;
1725   SCOTCH_Num*     edloloctab = adjwgt;
1726   SCOTCH_Num      flagval = strategy;
1727   double          kbalval = imbalance;
1728   PetscErrorCode  ierr;
1729 
1730   PetscFunctionBegin;
1731   {
1732     PetscBool flg = PETSC_TRUE;
1733     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartitioner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
1734     if (!flg) veloloctab = NULL;
1735   }
1736   ierr = MPI_Comm_size(comm, &procglbnbr);CHKERRQ(ierr);
1737   ierr = MPI_Comm_rank(comm, &proclocnum);CHKERRQ(ierr);
1738   vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
1739   edgelocnbr = xadj[vertlocnbr];
1740 
1741   ierr = SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr);
1742   ierr = SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr);
1743 #if defined(PETSC_USE_DEBUG)
1744   ierr = SCOTCH_dgraphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1745 #endif
1746   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1747   ierr = SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRPTSCOTCH(ierr);
1748   ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
1749   ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
1750   ierr = SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr);
1751   ierr = SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr);
1752   SCOTCH_dgraphMapExit(&grafdat, &mappdat);
1753   SCOTCH_archExit(&archdat);
1754   SCOTCH_stratExit(&stradat);
1755   SCOTCH_dgraphExit(&grafdat);
1756   PetscFunctionReturn(0);
1757 }
1758 
1759 #endif /* PETSC_HAVE_PTSCOTCH */
1760 
1761 static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
1762 {
1763   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1764   PetscErrorCode             ierr;
1765 
1766   PetscFunctionBegin;
1767   ierr = PetscFree(p);CHKERRQ(ierr);
1768   PetscFunctionReturn(0);
1769 }
1770 
1771 static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer)
1772 {
1773   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1774   PetscErrorCode            ierr;
1775 
1776   PetscFunctionBegin;
1777   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1778   ierr = PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);CHKERRQ(ierr);
1779   ierr = PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n",(double)p->imbalance);CHKERRQ(ierr);
1780   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1781   PetscFunctionReturn(0);
1782 }
1783 
1784 static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
1785 {
1786   PetscBool      iascii;
1787   PetscErrorCode ierr;
1788 
1789   PetscFunctionBegin;
1790   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1791   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1792   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1793   if (iascii) {ierr = PetscPartitionerView_PTScotch_Ascii(part, viewer);CHKERRQ(ierr);}
1794   PetscFunctionReturn(0);
1795 }
1796 
1797 static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1798 {
1799   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1800   const char *const         *slist = PTScotchStrategyList;
1801   PetscInt                  nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0]));
1802   PetscBool                 flag;
1803   PetscErrorCode            ierr;
1804 
1805   PetscFunctionBegin;
1806   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");CHKERRQ(ierr);
1807   ierr = PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);CHKERRQ(ierr);
1808   ierr = PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);CHKERRQ(ierr);
1809   ierr = PetscOptionsTail();CHKERRQ(ierr);
1810   PetscFunctionReturn(0);
1811 }
1812 
1813 static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1814 {
1815 #if defined(PETSC_HAVE_PTSCOTCH)
1816   MPI_Comm       comm       = PetscObjectComm((PetscObject)part);
1817   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1818   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1819   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1820   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1821   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1822   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1823   PetscInt       v, i, *assignment, *points;
1824   PetscMPIInt    size, rank, p;
1825   PetscErrorCode ierr;
1826 
1827   PetscFunctionBegin;
1828   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1829   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1830   ierr = PetscMalloc2(size+1,&vtxdist,PetscMax(nvtxs,1),&assignment);CHKERRQ(ierr);
1831 
1832   /* Calculate vertex distribution */
1833   vtxdist[0] = 0;
1834   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1835   for (p = 2; p <= size; ++p) {
1836     vtxdist[p] += vtxdist[p-1];
1837   }
1838 
1839   if (nparts == 1) {
1840     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1841   } else {
1842     PetscSection section;
1843     /* Weight cells by dofs on cell by default */
1844     ierr = PetscMalloc1(PetscMax(nvtxs,1),&vwgt);CHKERRQ(ierr);
1845     ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
1846     if (section) {
1847       PetscInt vStart, vEnd, dof;
1848       ierr = DMPlexGetHeightStratum(dm, 0, &vStart, &vEnd);CHKERRQ(ierr);
1849       for (v = vStart; v < vEnd; ++v) {
1850         ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1851         /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1852         /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1853         if (v-vStart < numVertices) vwgt[v-vStart] = PetscMax(dof, 1);
1854       }
1855     } else {
1856       for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1857     }
1858     {
1859       PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
1860       int                       strat = PTScotch_Strategy(pts->strategy);
1861       double                    imbal = (double)pts->imbalance;
1862 
1863       for (p = 0; p < size && !vtxdist[p+1]; ++p); /* find the first rank with vertices; test p < size before reading vtxdist[p+1] */
1864       if (vtxdist[p+1] == vtxdist[size]) {
1865         if (rank == p) {
1866           ierr = PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, assignment);CHKERRQ(ierr);
1867         }
1868       } else {
1869         ierr = PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, assignment, comm);CHKERRQ(ierr);
1870       }
1871     }
1872     ierr = PetscFree(vwgt);CHKERRQ(ierr);
1873   }
1874 
1875   /* Convert to PetscSection+IS */
1876   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1877   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1878   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1879   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1880   for (p = 0, i = 0; p < nparts; ++p) {
1881     for (v = 0; v < nvtxs; ++v) {
1882       if (assignment[v] == p) points[i++] = v;
1883     }
1884   }
1885   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1886   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1887 
1888   ierr = PetscFree2(vtxdist,assignment);CHKERRQ(ierr);
1889   PetscFunctionReturn(0);
1890 #else
1891   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
1892 #endif
1893 }
1894 
1895 static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
1896 {
1897   PetscFunctionBegin;
1898   part->ops->view           = PetscPartitionerView_PTScotch;
1899   part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
1900   part->ops->partition      = PetscPartitionerPartition_PTScotch;
1901   part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
1902   PetscFunctionReturn(0);
1903 }
1904 
1905 /*MC
1906   PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library
1907 
1908   Level: intermediate
1909 
1910 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1911 M*/
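/* A minimal usage sketch (illustrative, not part of the library): select and tune
   this partitioner from the command line, using the options registered in
   PetscPartitionerSetFromOptions_PTScotch() above:

     -petscpartitioner_type ptscotch
     -petscpartitioner_ptscotch_strategy QUALITY
     -petscpartitioner_ptscotch_imbalance 0.05
*/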
1912 
1913 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
1914 {
1915   PetscPartitioner_PTScotch *p;
1916   PetscErrorCode          ierr;
1917 
1918   PetscFunctionBegin;
1919   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1920   ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
1921   part->data = p;
1922 
1923   p->strategy  = 0;
1924   p->imbalance = 0.01;
1925 
1926   ierr = PetscPartitionerInitialize_PTScotch(part);CHKERRQ(ierr);
1927   ierr = PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);CHKERRQ(ierr);
1928   PetscFunctionReturn(0);
1929 }
1930 
1931 
1932 /*@
1933   DMPlexGetPartitioner - Get the mesh partitioner
1934 
1935   Not collective
1936 
1937   Input Parameter:
1938 . dm - The DM
1939 
1940   Output Parameter:
1941 . part - The PetscPartitioner
1942 
1943   Level: developer
1944 
1945   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1946 
1947 .seealso: DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1948 @*/
1949 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1950 {
1951   DM_Plex       *mesh = (DM_Plex *) dm->data;
1952 
1953   PetscFunctionBegin;
1954   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1955   PetscValidPointer(part, 2);
1956   *part = mesh->partitioner;
1957   PetscFunctionReturn(0);
1958 }
1959 
1960 /*@
1961   DMPlexSetPartitioner - Set the mesh partitioner
1962 
1963   Logically collective on dm and part
1964 
1965   Input Parameters:
1966 + dm - The DM
1967 - part - The partitioner
1968 
1969   Level: developer
1970 
1971   Note: Any existing PetscPartitioner will be destroyed.
1972 
1973 .seealso: DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1974 @*/
1975 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1976 {
1977   DM_Plex       *mesh = (DM_Plex *) dm->data;
1978   PetscErrorCode ierr;
1979 
1980   PetscFunctionBegin;
1981   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1982   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1983   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1984   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1985   mesh->partitioner = part;
1986   PetscFunctionReturn(0);
1987 }
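
/* A minimal usage sketch (illustrative, not part of the library): install a freshly
   created partitioner on an existing DMPlex "dm". DMPlexSetPartitioner() takes its
   own reference, so the local reference can be dropped immediately.

     PetscPartitioner newpart;
     ierr = PetscPartitionerCreate(PetscObjectComm((PetscObject) dm), &newpart);CHKERRQ(ierr);
     ierr = PetscPartitionerSetType(newpart, PETSCPARTITIONERPARMETIS);CHKERRQ(ierr);
     ierr = DMPlexSetPartitioner(dm, newpart);CHKERRQ(ierr);
     ierr = PetscPartitionerDestroy(&newpart);CHKERRQ(ierr);
*/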
1988 
1989 static PetscErrorCode DMPlexAddClosure_Tree(DM dm, PetscHSetI ht, PetscInt point, PetscBool up, PetscBool down)
1990 {
1991   PetscErrorCode ierr;
1992 
1993   PetscFunctionBegin;
1994   if (up) {
1995     PetscInt parent;
1996 
1997     ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
1998     if (parent != point) {
1999       PetscInt closureSize, *closure = NULL, i;
2000 
2001       ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
2002       for (i = 0; i < closureSize; i++) {
2003         PetscInt cpoint = closure[2*i];
2004 
2005         ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
2006         ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
2007       }
2008       ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
2009     }
2010   }
2011   if (down) {
2012     PetscInt numChildren;
2013     const PetscInt *children;
2014 
2015     ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
2016     if (numChildren) {
2017       PetscInt i;
2018 
2019       for (i = 0; i < numChildren; i++) {
2020         PetscInt cpoint = children[i];
2021 
2022         ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
2023         ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
2024       }
2025     }
2026   }
2027   PetscFunctionReturn(0);
2028 }
2029 
2030 PETSC_INTERN PetscErrorCode DMPlexPartitionLabelClosure_Private(DM,DMLabel,PetscInt,PetscInt,const PetscInt[],IS*);
2031 
2032 PetscErrorCode DMPlexPartitionLabelClosure_Private(DM dm, DMLabel label, PetscInt rank, PetscInt numPoints, const PetscInt points[], IS *closureIS)
2033 {
2034   DM_Plex        *mesh = (DM_Plex *)dm->data;
2035   PetscBool      hasTree = (mesh->parentSection || mesh->childSection) ? PETSC_TRUE : PETSC_FALSE;
2036   PetscInt       *closure = NULL, closureSize, nelems, *elems, off = 0, p, c;
2037   PetscHSetI     ht;
2038   PetscErrorCode ierr;
2039 
2040   PetscFunctionBegin;
2041   ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
2042   ierr = PetscHSetIResize(ht, numPoints*16);CHKERRQ(ierr);
2043   for (p = 0; p < numPoints; ++p) {
2044     ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
2045     for (c = 0; c < closureSize*2; c += 2) {
2046       ierr = PetscHSetIAdd(ht, closure[c]);CHKERRQ(ierr);
2047       if (hasTree) {ierr = DMPlexAddClosure_Tree(dm, ht, closure[c], PETSC_TRUE, PETSC_TRUE);CHKERRQ(ierr);}
2048     }
2049   }
2050   if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure);CHKERRQ(ierr);}
2051   ierr = PetscHSetIGetSize(ht, &nelems);CHKERRQ(ierr);
2052   ierr = PetscMalloc1(nelems, &elems);CHKERRQ(ierr);
2053   ierr = PetscHSetIGetElems(ht, &off, elems);CHKERRQ(ierr);
2054   ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
2055   ierr = PetscSortInt(nelems, elems);CHKERRQ(ierr);
2056   ierr = ISCreateGeneral(PETSC_COMM_SELF, nelems, elems, PETSC_OWN_POINTER, closureIS);CHKERRQ(ierr);
2057   PetscFunctionReturn(0);
2058 }
2059 
2060 /*@
2061   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
2062 
2063   Input Parameters:
2064 + dm     - The DM
2065 - label  - DMLabel assigning ranks to remote roots
2066 
2067   Level: developer
2068 
2069 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2070 @*/
2071 PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
2072 {
2073   IS              rankIS,   pointIS, closureIS;
2074   const PetscInt *ranks,   *points;
2075   PetscInt        numRanks, numPoints, r;
2076   PetscErrorCode  ierr;
2077 
2078   PetscFunctionBegin;
2079   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2080   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2081   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2082   for (r = 0; r < numRanks; ++r) {
2083     const PetscInt rank = ranks[r];
2084     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2085     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2086     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2087     ierr = DMPlexPartitionLabelClosure_Private(dm, label, rank, numPoints, points, &closureIS);CHKERRQ(ierr);
2088     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2089     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2090     ierr = DMLabelSetStratumIS(label, rank, closureIS);CHKERRQ(ierr);
2091     ierr = ISDestroy(&closureIS);CHKERRQ(ierr);
2092   }
2093   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2094   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2095   PetscFunctionReturn(0);
2096 }
2097 
2098 /*@
2099   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
2100 
2101   Input Parameters:
2102 + dm     - The DM
2103 - label  - DMLabel assigning ranks to remote roots
2104 
2105   Level: developer
2106 
2107 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2108 @*/
2109 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
2110 {
2111   IS              rankIS,   pointIS;
2112   const PetscInt *ranks,   *points;
2113   PetscInt        numRanks, numPoints, r, p, a, adjSize;
2114   PetscInt       *adj = NULL;
2115   PetscErrorCode  ierr;
2116 
2117   PetscFunctionBegin;
2118   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2119   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2120   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2121   for (r = 0; r < numRanks; ++r) {
2122     const PetscInt rank = ranks[r];
2123 
2124     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2125     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2126     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2127     for (p = 0; p < numPoints; ++p) {
2128       adjSize = PETSC_DETERMINE;
2129       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
2130       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
2131     }
2132     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2133     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2134   }
2135   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2136   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2137   ierr = PetscFree(adj);CHKERRQ(ierr);
2138   PetscFunctionReturn(0);
2139 }
2140 
2141 /*@
2142   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
2143 
2144   Input Parameters:
2145 + dm     - The DM
2146 - label  - DMLabel assigning ranks to remote roots
2147 
2148   Level: developer
2149 
2150   Note: This is required when generating multi-level overlaps to capture
2151   overlap points from non-neighbouring partitions.
2152 
2153 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2154 @*/
2155 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
2156 {
2157   MPI_Comm        comm;
2158   PetscMPIInt     rank;
2159   PetscSF         sfPoint;
2160   DMLabel         lblRoots, lblLeaves;
2161   IS              rankIS, pointIS;
2162   const PetscInt *ranks;
2163   PetscInt        numRanks, r;
2164   PetscErrorCode  ierr;
2165 
2166   PetscFunctionBegin;
2167   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2168   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2169   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
2170   /* Pull point contributions from remote leaves into local roots */
2171   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
2172   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
2173   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2174   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2175   for (r = 0; r < numRanks; ++r) {
2176     const PetscInt remoteRank = ranks[r];
2177     if (remoteRank == rank) continue;
2178     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
2179     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2180     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2181   }
2182   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2183   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2184   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
2185   /* Push point contributions from roots into remote leaves */
2186   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
2187   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
2188   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2189   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2190   for (r = 0; r < numRanks; ++r) {
2191     const PetscInt remoteRank = ranks[r];
2192     if (remoteRank == rank) continue;
2193     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
2194     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2195     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2196   }
2197   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2198   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2199   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
2200   PetscFunctionReturn(0);
2201 }
2202 
2203 /*@
2204   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
2205 
2206   Input Parameters:
2207 + dm        - The DM
2208 . rootLabel - DMLabel assigning ranks to local roots
2209 - processSF - A star forest mapping into the local index on each remote rank
2210 
2211   Output Parameter:
2212 . leafLabel - DMLabel assigning ranks to remote roots
2213 
2214   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
2215   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
2216 
2217   Level: developer
2218 
2219 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2220 @*/
2221 PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
2222 {
2223   MPI_Comm           comm;
2224   PetscMPIInt        rank, size, r;
2225   PetscInt           p, n, numNeighbors, numPoints, dof, off, rootSize, l, nleaves, leafSize;
2226   PetscSF            sfPoint;
2227   PetscSection       rootSection;
2228   PetscSFNode       *rootPoints, *leafPoints;
2229   const PetscSFNode *remote;
2230   const PetscInt    *local, *neighbors;
2231   IS                 valueIS;
2232   PetscBool          mpiOverflow = PETSC_FALSE;
2233   PetscErrorCode     ierr;
2234 
2235   PetscFunctionBegin;
2236   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2237   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2238   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2239   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
2240 
2241   /* Convert to (point, rank) and use actual owners */
2242   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
2243   ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
2244   ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
2245   ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
2246   ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
2247   for (n = 0; n < numNeighbors; ++n) {
2248     ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
2249     ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
2250   }
2251   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
2252   ierr = PetscSectionGetStorageSize(rootSection, &rootSize);CHKERRQ(ierr);
2253   ierr = PetscMalloc1(rootSize, &rootPoints);CHKERRQ(ierr);
2254   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
2255   for (n = 0; n < numNeighbors; ++n) {
2256     IS              pointIS;
2257     const PetscInt *points;
2258 
2259     ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
2260     ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
2261     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2262     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2263     for (p = 0; p < numPoints; ++p) {
2264       if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
2265       else       {l = -1;}
2266       if (l >= 0) {rootPoints[off+p] = remote[l];}
2267       else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
2268     }
2269     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2270     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2271   }
2272 
2273   /* Try to communicate overlap using All-to-All */
2274   if (!processSF) {
2275     PetscInt64  counter = 0;
2276     PetscBool   locOverflow = PETSC_FALSE;
2277     PetscMPIInt *scounts, *sdispls, *rcounts, *rdispls;
2278 
2279     ierr = PetscCalloc4(size, &scounts, size, &sdispls, size, &rcounts, size, &rdispls);CHKERRQ(ierr);
2280     for (n = 0; n < numNeighbors; ++n) {
2281       ierr = PetscSectionGetDof(rootSection, neighbors[n], &dof);CHKERRQ(ierr);
2282       ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
2283 #if defined(PETSC_USE_64BIT_INDICES)
2284       if (dof > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
2285       if (off > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
2286 #endif
2287       scounts[neighbors[n]] = (PetscMPIInt) dof;
2288       sdispls[neighbors[n]] = (PetscMPIInt) off;
2289     }
2290     ierr = MPI_Alltoall(scounts, 1, MPI_INT, rcounts, 1, MPI_INT, comm);CHKERRQ(ierr);
2291     for (r = 0; r < size; ++r) { rdispls[r] = (int)counter; counter += rcounts[r]; }
2292     if (counter > PETSC_MPI_INT_MAX) locOverflow = PETSC_TRUE;
2293     ierr = MPI_Allreduce(&locOverflow, &mpiOverflow, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
2294     if (!mpiOverflow) {
2295       leafSize = (PetscInt) counter;
2296       ierr = PetscMalloc1(leafSize, &leafPoints);CHKERRQ(ierr);
2297       ierr = MPI_Alltoallv(rootPoints, scounts, sdispls, MPIU_2INT, leafPoints, rcounts, rdispls, MPIU_2INT, comm);CHKERRQ(ierr);
2298     }
2299     ierr = PetscFree4(scounts, sdispls, rcounts, rdispls);CHKERRQ(ierr);
2300   }
2301 
2302   /* Communicate overlap using process star forest */
2303   if (processSF || mpiOverflow) {
2304     PetscSF      procSF;
2305     PetscSFNode  *remote;
2306     PetscSection leafSection;
2307 
2308     if (processSF) {
2309       ierr = PetscObjectReference((PetscObject)processSF);CHKERRQ(ierr);
2310       procSF = processSF;
2311     } else {
2312       ierr = PetscMalloc1(size, &remote);CHKERRQ(ierr);
2313       for (r = 0; r < size; ++r) { remote[r].rank  = r; remote[r].index = rank; }
2314       ierr = PetscSFCreate(comm, &procSF);CHKERRQ(ierr);
2315       ierr = PetscSFSetGraph(procSF, size, size, NULL, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
2316     }
2317 
2318     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dm), &leafSection);CHKERRQ(ierr);
2319     ierr = DMPlexDistributeData(dm, procSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr); /* procSF is valid whether or not processSF was provided */
2320     ierr = PetscSectionGetStorageSize(leafSection, &leafSize);CHKERRQ(ierr);
2321     ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
2322     ierr = PetscSFDestroy(&procSF);CHKERRQ(ierr);
2323   }
2324 
2325   for (p = 0; p < leafSize; p++) {
2326     ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
2327   }
2328 
2329   ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
2330   ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
2331   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
2332   ierr = PetscFree(rootPoints);CHKERRQ(ierr);
2333   ierr = PetscFree(leafPoints);CHKERRQ(ierr);
2334   PetscFunctionReturn(0);
2335 }
2336 
2337 /*@
2338   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
2339 
2340   Input Parameters:
2341 + dm    - The DM
2342 - label - DMLabel assigning ranks to remote roots
2343 
2344   Output Parameter:
2345 . sf    - The star forest communication context encapsulating the defined mapping
2346 
2347   Note: The incoming label is a receiver mapping of remote points to their parent rank.
2348 
2349   Level: developer
2350 
2351 .seealso: DMPlexDistribute(), DMPlexCreateOverlap()
2352 @*/
2353 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
2354 {
2355   PetscMPIInt     rank, size;
2356   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
2357   PetscSFNode    *remotePoints;
2358   IS              remoteRootIS;
2359   const PetscInt *remoteRoots;
2360   PetscErrorCode ierr;
2361 
2362   PetscFunctionBegin;
2363   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
2364   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
2365 
2366   for (numRemote = 0, n = 0; n < size; ++n) {
2367     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
2368     numRemote += numPoints;
2369   }
2370   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
2371   /* Put owned points first */
2372   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
2373   if (numPoints > 0) {
2374     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
2375     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2376     for (p = 0; p < numPoints; p++) {
2377       remotePoints[idx].index = remoteRoots[p];
2378       remotePoints[idx].rank = rank;
2379       idx++;
2380     }
2381     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2382     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
2383   }
2384   /* Now add remote points */
2385   for (n = 0; n < size; ++n) {
2386     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
2387     if (numPoints <= 0 || n == rank) continue;
2388     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
2389     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2390     for (p = 0; p < numPoints; p++) {
2391       remotePoints[idx].index = remoteRoots[p];
2392       remotePoints[idx].rank = n;
2393       idx++;
2394     }
2395     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2396     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
2397   }
2398   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
2399   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
2400   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
2401   PetscFunctionReturn(0);
2402 }
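
/* A minimal end-to-end sketch (illustrative, not part of the library) of the
   partition-label workflow implemented above. It assumes the caller has a DMPlex
   "dm", a populated DMLabel "rootLabel" mapping local points to target ranks
   (e.g. via DMLabelSetValue(rootLabel, point, targetRank)), and an empty DMLabel
   "leafLabel":

     PetscSF migrationSF;
     ierr = DMPlexPartitionLabelClosure(dm, rootLabel);CHKERRQ(ierr);                 /* complete the send set with closures */
     ierr = DMPlexPartitionLabelInvert(dm, rootLabel, NULL, leafLabel);CHKERRQ(ierr); /* invert senders into receivers */
     ierr = DMPlexPartitionLabelCreateSF(dm, leafLabel, &migrationSF);CHKERRQ(ierr);  /* build the communication pattern */
*/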
2403