xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 3a074057bf25314dc21e177b1a2e45e66b0fa3b1)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
3 PetscClassId PETSCPARTITIONER_CLASSID = 0;
4 
5 PetscFunctionList PetscPartitionerList              = NULL;
6 PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
7 
8 PetscBool ChacoPartitionercite = PETSC_FALSE;
9 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
10                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
11                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
12                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},\n"
13                                "  isbn      = {0-89791-816-9},\n"
14                                "  pages     = {28},\n"
15                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
16                                "  publisher = {ACM Press},\n"
17                                "  address   = {New York},\n"
18                                "  year      = {1995}\n}\n";
19 
20 PetscBool ParMetisPartitionercite = PETSC_FALSE;
21 const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
22                                "  author  = {George Karypis and Vipin Kumar},\n"
23                                "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
24                                "  journal = {Journal of Parallel and Distributed Computing},\n"
25                                "  volume  = {48},\n"
26                                "  pages   = {71--85},\n"
27                                "  year    = {1998}\n}\n";
28 
29 /*@C
30   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
31 
32   Input Parameters:
33 + dm      - The mesh DM
34 - height  - Height of the strata from which to construct the graph
35 
36   Output Parameters:
37 + numVertices     - Number of vertices in the graph
38 . offsets         - Point offsets in the graph
39 . adjacency       - Point connectivity in the graph
40 - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency". A negative value indicates that the cell is a duplicate from another process.
41 
42   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
43   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
44   representation on the mesh.
45 
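  Sample usage (a minimal sketch, assuming dm is an existing DMPlex; error checking is omitted and the CSR arrays would normally be handed to a partitioner before being freed):
.vb
    PetscInt numVertices, *start, *adjacency;
    IS       globalNumbering;

    DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);
    PetscFree(start);
    PetscFree(adjacency);
    ISDestroy(&globalNumbering);
.ve
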
46   Level: developer
47 
48 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
49 @*/
50 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
51 {
52   PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
53   PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
54   IS             cellNumbering;
55   const PetscInt *cellNum;
56   PetscBool      useCone, useClosure;
57   PetscSection   section;
58   PetscSegBuffer adjBuffer;
59   PetscSF        sfPoint;
60   PetscMPIInt    rank;
61   PetscErrorCode ierr;
62 
63   PetscFunctionBegin;
64   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
65   ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
66   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
67   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
68   /* Build adjacency graph via a section/segbuffer */
69   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
70   ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
71   ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
72   /* Always use FVM adjacency to create partitioner graph */
73   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
74   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
75   ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
76   ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
77   ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);CHKERRQ(ierr);
78   if (globalNumbering) {
79     ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
80     *globalNumbering = cellNumbering;
81   }
82   ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
83   for (*numVertices = 0, p = pStart; p < pEnd; p++) {
84     /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
85     if (nroots > 0) {if (cellNum[p] < 0) continue;}
86     adjSize = PETSC_DETERMINE;
87     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
88     for (a = 0; a < adjSize; ++a) {
89       const PetscInt point = adj[a];
90       if (point != p && pStart <= point && point < pEnd) {
91         PetscInt *PETSC_RESTRICT pBuf;
92         ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
93         ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
94         *pBuf = point;
95       }
96     }
97     (*numVertices)++;
98   }
99   ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
100   ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
101   /* Derive CSR graph from section/segbuffer */
102   ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
103   ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
104   ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
105   for (idx = 0, p = pStart; p < pEnd; p++) {
106     if (nroots > 0) {if (cellNum[p] < 0) continue;}
107     ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
108   }
109   vOffsets[*numVertices] = size;
110   if (offsets) *offsets = vOffsets;
111   ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
112   {
113     ISLocalToGlobalMapping ltogCells;
114     PetscInt n, size, *cells_arr;
115     /* In parallel, apply a global cell numbering to the graph */
116     ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
117     ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
118     ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
119     ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
120     /* Convert to positive global cell numbers */
121     for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
122     ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
123     ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);CHKERRQ(ierr);
124     ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
125     ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
126   }
127   if (adjacency) *adjacency = graph;
128   /* Clean up */
129   ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
130   ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
131   ierr = PetscFree(adj);CHKERRQ(ierr);
132   PetscFunctionReturn(0);
133 }
134 
135 /*@C
136   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
137 
138   Collective
139 
140   Input Parameters:
141 + dm - The DMPlex
142 - cellHeight - The height of mesh points to treat as cells (default should be 0)
143 
144   Output Parameters:
145 + numVertices - The number of local vertices in the graph, or cells in the mesh.
146 . offsets     - The offset to the adjacency list for each cell
147 - adjacency   - The adjacency list for all cells
148 
149   Note: This is suitable for input to a mesh partitioner like ParMetis.
150 
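  Sample usage (a minimal sketch, assuming dm is an existing DMPlex; error checking omitted):
.vb
    PetscInt numVertices, *offsets, *adjacency;

    DMPlexCreateNeighborCSR(dm, 0, &numVertices, &offsets, &adjacency);
    PetscFree(offsets);
    PetscFree(adjacency);
.ve
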
151   Level: advanced
152 
153 .seealso: DMPlexCreate()
154 @*/
155 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
156 {
157   const PetscInt maxFaceCases = 30;
158   PetscInt       numFaceCases = 0;
159   PetscInt       numFaceVertices[30]; /* maxFaceCases, since C89 does not allow a const variable as an array dimension */
160   PetscInt      *off, *adj;
161   PetscInt      *neighborCells = NULL;
162   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
163   PetscErrorCode ierr;
164 
165   PetscFunctionBegin;
166   /* For parallel partitioning, the face supports would have to be communicated */
167   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
168   cellDim = dim - cellHeight;
169   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
170   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
171   if (cEnd - cStart == 0) {
172     if (numVertices) *numVertices = 0;
173     if (offsets)   *offsets   = NULL;
174     if (adjacency) *adjacency = NULL;
175     PetscFunctionReturn(0);
176   }
177   numCells  = cEnd - cStart;
178   faceDepth = depth - cellHeight;
179   if (dim == depth) {
180     PetscInt f, fStart, fEnd;
181 
182     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
183     /* Count neighboring cells */
184     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
185     for (f = fStart; f < fEnd; ++f) {
186       const PetscInt *support;
187       PetscInt        supportSize;
188       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
189       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
190       if (supportSize == 2) {
191         PetscInt numChildren;
192 
193         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
194         if (!numChildren) {
195           ++off[support[0]-cStart+1];
196           ++off[support[1]-cStart+1];
197         }
198       }
199     }
200     /* Prefix sum */
201     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
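    /* off[c] now gives the start of cell c's adjacency list, and off[numCells] the total number of entries */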
202     if (adjacency) {
203       PetscInt *tmp;
204 
205       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
206       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
207       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
208       /* Get neighboring cells */
209       for (f = fStart; f < fEnd; ++f) {
210         const PetscInt *support;
211         PetscInt        supportSize;
212         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
213         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
214         if (supportSize == 2) {
215           PetscInt numChildren;
216 
217           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
218           if (!numChildren) {
219             adj[tmp[support[0]-cStart]++] = support[1];
220             adj[tmp[support[1]-cStart]++] = support[0];
221           }
222         }
223       }
224 #if defined(PETSC_USE_DEBUG)
225       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %D != %D for cell %D", tmp[c], off[c+1], c+cStart);
226 #endif
227       ierr = PetscFree(tmp);CHKERRQ(ierr);
228     }
229     if (numVertices) *numVertices = numCells;
230     if (offsets)   *offsets   = off;
231     if (adjacency) *adjacency = adj;
232     PetscFunctionReturn(0);
233   }
234   /* Setup face recognition */
235   if (faceDepth == 1) {
236     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
237 
238     for (c = cStart; c < cEnd; ++c) {
239       PetscInt corners;
240 
241       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
242       if (!cornersSeen[corners]) {
243         PetscInt nFV;
244 
245         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
246         cornersSeen[corners] = 1;
247 
248         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
249 
250         numFaceVertices[numFaceCases++] = nFV;
251       }
252     }
253   }
254   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
255   /* Count neighboring cells */
256   for (cell = cStart; cell < cEnd; ++cell) {
257     PetscInt numNeighbors = PETSC_DETERMINE, n;
258 
259     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
260     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
261     for (n = 0; n < numNeighbors; ++n) {
262       PetscInt        cellPair[2];
263       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
264       PetscInt        meetSize = 0;
265       const PetscInt *meet    = NULL;
266 
267       cellPair[0] = cell; cellPair[1] = neighborCells[n];
268       if (cellPair[0] == cellPair[1]) continue;
269       if (!found) {
270         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
271         if (meetSize) {
272           PetscInt f;
273 
274           for (f = 0; f < numFaceCases; ++f) {
275             if (numFaceVertices[f] == meetSize) {
276               found = PETSC_TRUE;
277               break;
278             }
279           }
280         }
281         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
282       }
283       if (found) ++off[cell-cStart+1];
284     }
285   }
286   /* Prefix sum */
287   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
288 
289   if (adjacency) {
290     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
291     /* Get neighboring cells */
292     for (cell = cStart; cell < cEnd; ++cell) {
293       PetscInt numNeighbors = PETSC_DETERMINE, n;
294       PetscInt cellOffset   = 0;
295 
296       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
297       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
298       for (n = 0; n < numNeighbors; ++n) {
299         PetscInt        cellPair[2];
300         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
301         PetscInt        meetSize = 0;
302         const PetscInt *meet    = NULL;
303 
304         cellPair[0] = cell; cellPair[1] = neighborCells[n];
305         if (cellPair[0] == cellPair[1]) continue;
306         if (!found) {
307           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
308           if (meetSize) {
309             PetscInt f;
310 
311             for (f = 0; f < numFaceCases; ++f) {
312               if (numFaceVertices[f] == meetSize) {
313                 found = PETSC_TRUE;
314                 break;
315               }
316             }
317           }
318           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
319         }
320         if (found) {
321           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
322           ++cellOffset;
323         }
324       }
325     }
326   }
327   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
328   if (numVertices) *numVertices = numCells;
329   if (offsets)   *offsets   = off;
330   if (adjacency) *adjacency = adj;
331   PetscFunctionReturn(0);
332 }
333 
334 /*@C
335   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
336 
337   Not Collective
338 
339   Input Parameters:
340 + name        - The name of a new user-defined creation routine
341 - create_func - The creation routine itself
342 
343   Notes:
344   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
345 
346   Sample usage:
347 .vb
348     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
349 .ve
350 
351   Then, your PetscPartitioner type can be chosen with the procedural interface via
352 .vb
353     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
354     PetscPartitionerSetType(PetscPartitioner, "my_part");
355 .ve
356    or at runtime via the option
357 .vb
358     -petscpartitioner_type my_part
359 .ve
360 
361   Level: advanced
362 
363 .keywords: PetscPartitioner, register
364 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
365 
366 @*/
367 PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
368 {
369   PetscErrorCode ierr;
370 
371   PetscFunctionBegin;
372   ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
373   PetscFunctionReturn(0);
374 }
375 
376 /*@C
377   PetscPartitionerSetType - Builds a particular PetscPartitioner
378 
379   Collective on PetscPartitioner
380 
381   Input Parameters:
382 + part - The PetscPartitioner object
383 - name - The kind of partitioner
384 
385   Options Database Key:
386 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
387 
388   Level: intermediate
389 
390 .keywords: PetscPartitioner, set, type
391 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
392 @*/
393 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
394 {
395   PetscErrorCode (*r)(PetscPartitioner);
396   PetscBool      match;
397   PetscErrorCode ierr;
398 
399   PetscFunctionBegin;
400   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
401   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
402   if (match) PetscFunctionReturn(0);
403 
404   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
405   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
406   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
407 
408   if (part->ops->destroy) {
409     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
410     part->ops->destroy = NULL;
411   }
412   ierr = (*r)(part);CHKERRQ(ierr);
413   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
414   PetscFunctionReturn(0);
415 }
416 
417 /*@C
418   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
419 
420   Not Collective
421 
422   Input Parameter:
423 . part - The PetscPartitioner
424 
425   Output Parameter:
426 . name - The PetscPartitioner type name
427 
428   Level: intermediate
429 
430 .keywords: PetscPartitioner, get, type, name
431 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
432 @*/
433 PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
434 {
435   PetscErrorCode ierr;
436 
437   PetscFunctionBegin;
438   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
439   PetscValidPointer(name, 2);
440   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
441   *name = ((PetscObject) part)->type_name;
442   PetscFunctionReturn(0);
443 }
444 
445 /*@C
446   PetscPartitionerView - Views a PetscPartitioner
447 
448   Collective on PetscPartitioner
449 
450   Input Parameters:
451 + part - the PetscPartitioner object to view
452 - v    - the viewer
453 
454   Level: developer
455 
456 .seealso: PetscPartitionerDestroy()
457 @*/
458 PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
459 {
460   PetscErrorCode ierr;
461 
462   PetscFunctionBegin;
463   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
464   if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
465   if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
466   PetscFunctionReturn(0);
467 }
468 
469 static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType)
470 {
471   PetscFunctionBegin;
472   if (!currentType) {
473 #if defined(PETSC_HAVE_CHACO)
474     *defaultType = PETSCPARTITIONERCHACO;
475 #elif defined(PETSC_HAVE_PARMETIS)
476     *defaultType = PETSCPARTITIONERPARMETIS;
477 #elif defined(PETSC_HAVE_PTSCOTCH)
478     *defaultType = PETSCPARTITIONERPTSCOTCH;
479 #else
480     *defaultType = PETSCPARTITIONERSIMPLE;
481 #endif
482   } else {
483     *defaultType = currentType;
484   }
485   PetscFunctionReturn(0);
486 }
487 
488 PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
489 {
490   const char    *defaultType;
491   char           name[256];
492   PetscBool      flg;
493   PetscErrorCode ierr;
494 
495   PetscFunctionBegin;
496   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
497   ierr = PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);CHKERRQ(ierr);
498   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
499 
500   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
501   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);CHKERRQ(ierr);
502   if (flg) {
503     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
504   } else if (!((PetscObject) part)->type_name) {
505     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
506   }
507   ierr = PetscOptionsEnd();CHKERRQ(ierr);
508   PetscFunctionReturn(0);
509 }
510 
511 /*@
512   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
513 
514   Collective on PetscPartitioner
515 
516   Input Parameter:
517 . part - the PetscPartitioner object to set options for
518 
519   Level: developer
520 
521 .seealso: PetscPartitionerView()
522 @*/
523 PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
524 {
525   PetscErrorCode ierr;
526 
527   PetscFunctionBegin;
528   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
529   ierr = PetscPartitionerSetTypeFromOptions_Internal(part);CHKERRQ(ierr);
530 
531   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
532   if (part->ops->setfromoptions) {ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);}
533   /* process any options handlers added with PetscObjectAddOptionsHandler() */
534   ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
535   ierr = PetscOptionsEnd();CHKERRQ(ierr);
536   ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
537   PetscFunctionReturn(0);
538 }
539 
540 /*@C
541   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
542 
543   Collective on PetscPartitioner
544 
545   Input Parameter:
546 . part - the PetscPartitioner object to setup
547 
548   Level: developer
549 
550 .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
551 @*/
552 PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
553 {
554   PetscErrorCode ierr;
555 
556   PetscFunctionBegin;
557   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
558   if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
559   PetscFunctionReturn(0);
560 }
561 
562 /*@
563   PetscPartitionerDestroy - Destroys a PetscPartitioner object
564 
565   Collective on PetscPartitioner
566 
567   Input Parameter:
568 . part - the PetscPartitioner object to destroy
569 
570   Level: developer
571 
572 .seealso: PetscPartitionerView()
573 @*/
574 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
575 {
576   PetscErrorCode ierr;
577 
578   PetscFunctionBegin;
579   if (!*part) PetscFunctionReturn(0);
580   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
581 
582   if (--((PetscObject)(*part))->refct > 0) {*part = NULL; PetscFunctionReturn(0);}
583   ((PetscObject) (*part))->refct = 0;
584 
585   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
586   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
587   PetscFunctionReturn(0);
588 }
589 
590 /*@
591   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
592 
593   Collective on MPI_Comm
594 
595   Input Parameter:
596 . comm - The communicator for the PetscPartitioner object
597 
598   Output Parameter:
599 . part - The PetscPartitioner object
600 
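  Sample usage (a minimal sketch; error checking omitted):
.vb
    PetscPartitioner part;

    PetscPartitionerCreate(PETSC_COMM_WORLD, &part);
    PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE);
    PetscPartitionerSetFromOptions(part);
    PetscPartitionerDestroy(&part);
.ve
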
601   Level: beginner
602 
603 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
604 @*/
605 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
606 {
607   PetscPartitioner p;
608   const char       *partitionerType = NULL;
609   PetscErrorCode   ierr;
610 
611   PetscFunctionBegin;
612   PetscValidPointer(part, 2);
613   *part = NULL;
614   ierr = DMInitializePackage();CHKERRQ(ierr);
615 
616   ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
617   ierr = PetscPartitionerGetDefaultType(NULL,&partitionerType);CHKERRQ(ierr);
618   ierr = PetscPartitionerSetType(p,partitionerType);CHKERRQ(ierr);
619 
620   *part = p;
621   PetscFunctionReturn(0);
622 }
623 
624 /*@
625   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
626 
627   Collective on DM
628 
629   Input Parameters:
630 + part    - The PetscPartitioner
631 - dm      - The mesh DM
632 
633   Output Parameters:
634 + partSection     - The PetscSection giving the division of points by partition
635 - partition       - The list of points by partition
636 
637   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
638 
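  Sample usage (a minimal sketch, assuming part and dm already exist; error checking omitted):
.vb
    PetscSection partSection;
    IS           partition;

    PetscSectionCreate(PetscObjectComm((PetscObject) dm), &partSection);
    PetscPartitionerPartition(part, dm, partSection, &partition);
    ISDestroy(&partition);
    PetscSectionDestroy(&partSection);
.ve
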
639   Level: developer
640 
641 .seealso: DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
642 @*/
643 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
644 {
645   PetscMPIInt    size;
646   PetscErrorCode ierr;
647 
648   PetscFunctionBegin;
649   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
650   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
651   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 3);
652   PetscValidPointer(partition, 4);
653   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
654   if (size == 1) {
655     PetscInt *points;
656     PetscInt  cStart, cEnd, c;
657 
658     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
659     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
660     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
661     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
662     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
663     for (c = cStart; c < cEnd; ++c) points[c] = c;
664     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
665     PetscFunctionReturn(0);
666   }
667   if (part->height == 0) {
668     PetscInt numVertices;
669     PetscInt *start     = NULL;
670     PetscInt *adjacency = NULL;
671     IS       globalNumbering;
672 
673     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
674     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
675     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
676     ierr = PetscFree(start);CHKERRQ(ierr);
677     ierr = PetscFree(adjacency);CHKERRQ(ierr);
678     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
679       const PetscInt *globalNum;
680       const PetscInt *partIdx;
681       PetscInt *map, cStart, cEnd;
682       PetscInt *adjusted, i, localSize, offset;
683       IS    newPartition;
684 
685       ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
686       ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
687       ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
688       ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
689       ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
690       ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
691       for (i = cStart, offset = 0; i < cEnd; i++) {
692         if (globalNum[i - cStart] >= 0) map[offset++] = i;
693       }
694       for (i = 0; i < localSize; i++) {
695         adjusted[i] = map[partIdx[i]];
696       }
697       ierr = PetscFree(map);CHKERRQ(ierr);
698       ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
699       ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
700       ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
701       ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
702       ierr = ISDestroy(partition);CHKERRQ(ierr);
703       *partition = newPartition;
704     }
705   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
706   PetscFunctionReturn(0);
707 }
709 
710 static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
711 {
712   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
713   PetscErrorCode          ierr;
714 
715   PetscFunctionBegin;
716   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
717   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
718   ierr = PetscFree(p);CHKERRQ(ierr);
719   PetscFunctionReturn(0);
720 }
721 
722 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
723 {
724   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
725   PetscViewerFormat       format;
726   PetscErrorCode          ierr;
727 
728   PetscFunctionBegin;
729   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
730   ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
731   if (p->random) {
732     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
733     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
734     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
735   }
736   PetscFunctionReturn(0);
737 }
738 
739 static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
740 {
741   PetscBool      iascii;
742   PetscErrorCode ierr;
743 
744   PetscFunctionBegin;
745   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
746   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
747   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
748   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
749   PetscFunctionReturn(0);
750 }
751 
752 static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
753 {
754   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
755   PetscErrorCode          ierr;
756 
757   PetscFunctionBegin;
758   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitionerShell Options");CHKERRQ(ierr);
759   ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
760   ierr = PetscOptionsTail();CHKERRQ(ierr);
761   PetscFunctionReturn(0);
762 }
763 
764 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
765 {
766   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
767   PetscInt                np;
768   PetscErrorCode          ierr;
769 
770   PetscFunctionBegin;
771   if (p->random) {
772     PetscRandom r;
773     PetscInt   *sizes, *points, v, p;
774     PetscMPIInt rank;
775 
776     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
777     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
778     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr);
779     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
780     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
781     for (v = 0; v < numVertices; ++v) {points[v] = v;}
782     for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);}
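    /* Fisher-Yates shuffle: randomly permute the points array before contiguous chunks of it are assigned to the partitions */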
783     for (v = numVertices-1; v > 0; --v) {
784       PetscReal val;
785       PetscInt  w, tmp;
786 
787       ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));CHKERRQ(ierr);
788       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
789       w    = PetscFloorReal(val);
790       tmp       = points[v];
791       points[v] = points[w];
792       points[w] = tmp;
793     }
794     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
795     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
796     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
797   }
798   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
799   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
800   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %D != configured partitions %D", nparts, np);
801   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
802   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %D != configured vertices %D", numVertices, np);
803   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
804   *partition = p->partition;
805   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
806   PetscFunctionReturn(0);
807 }
808 
809 static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
810 {
811   PetscFunctionBegin;
812   part->ops->view           = PetscPartitionerView_Shell;
813   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
814   part->ops->destroy        = PetscPartitionerDestroy_Shell;
815   part->ops->partition      = PetscPartitionerPartition_Shell;
816   PetscFunctionReturn(0);
817 }
818 
819 /*MC
820   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
821 
822   Level: intermediate
823 
824 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
825 M*/
826 
827 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
828 {
829   PetscPartitioner_Shell *p;
830   PetscErrorCode          ierr;
831 
832   PetscFunctionBegin;
833   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
834   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
835   part->data = p;
836 
837   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
838   p->random = PETSC_FALSE;
839   PetscFunctionReturn(0);
840 }
841 
842 /*@C
843   PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
844 
845   Collective on PetscPartitioner
846 
847   Input Parameters:
848 + part     - The PetscPartitioner
849 . size     - The number of partitions
850 . sizes    - array of length size (or NULL) providing the number of points in each partition
851 - points   - array of length sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)
852 
853   Level: developer
854 
855   Notes:
856 
857     It is safe to free the sizes and points arrays after use in this routine.
858 
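  Sample usage (a minimal sketch assigning eight points to two partitions; error checking omitted):
.vb
    const PetscInt sizes[2]  = {4, 4};
    const PetscInt points[8] = {0, 1, 2, 3, 4, 5, 6, 7};

    PetscPartitionerShellSetPartition(part, 2, sizes, points);
.ve
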
859 .seealso: DMPlexDistribute(), PetscPartitionerCreate()
860 @*/
861 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
862 {
863   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
864   PetscInt                proc, numPoints;
865   PetscErrorCode          ierr;
866 
867   PetscFunctionBegin;
868   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
869   if (sizes)  {PetscValidPointer(sizes, 3);}
870   if (points) {PetscValidPointer(points, 4);}
871   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
872   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
873   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
874   ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
875   if (sizes) {
876     for (proc = 0; proc < size; ++proc) {
877       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
878     }
879   }
880   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
881   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
882   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
883   PetscFunctionReturn(0);
884 }
885 
886 /*@
887   PetscPartitionerShellSetRandom - Set the flag to use a random partition
888 
889   Collective on PetscPartitioner
890 
891   Input Parameters:
892 + part   - The PetscPartitioner
893 - random - The flag to use a random partition
894 
895   Level: intermediate
896 
897 .seealso: PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
898 @*/
899 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
900 {
901   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
902 
903   PetscFunctionBegin;
904   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
905   p->random = random;
906   PetscFunctionReturn(0);
907 }
908 
909 /*@
910   PetscPartitionerShellGetRandom - Get the flag to use a random partition
911 
912   Collective on PetscPartitioner
913 
914   Input Parameter:
915 . part   - The PetscPartitioner
916 
917   Output Parameter:
918 . random - The flag to use a random partition
919 
920   Level: intermediate
921 
922 .seealso: PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
923 @*/
924 PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
925 {
926   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
927 
928   PetscFunctionBegin;
929   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
930   PetscValidPointer(random, 2);
931   *random = p->random;
932   PetscFunctionReturn(0);
933 }
934 
935 static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
936 {
937   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
938   PetscErrorCode          ierr;
939 
940   PetscFunctionBegin;
941   ierr = PetscFree(p);CHKERRQ(ierr);
942   PetscFunctionReturn(0);
943 }
944 
945 static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
946 {
947   PetscViewerFormat format;
948   PetscErrorCode    ierr;
949 
950   PetscFunctionBegin;
951   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
952   ierr = PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");CHKERRQ(ierr);
953   PetscFunctionReturn(0);
954 }
955 
956 static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
957 {
958   PetscBool      iascii;
959   PetscErrorCode ierr;
960 
961   PetscFunctionBegin;
962   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
963   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
964   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
965   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
966   PetscFunctionReturn(0);
967 }
968 
969 static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
970 {
971   MPI_Comm       comm;
972   PetscInt       np;
973   PetscMPIInt    size;
974   PetscErrorCode ierr;
975 
976   PetscFunctionBegin;
977   comm = PetscObjectComm((PetscObject)dm);
978   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
979   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
980   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
981   if (size == 1) {
982     for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
983   }
984   else {
985     PetscMPIInt rank;
986     PetscInt nvGlobal, *offsets, myFirst, myLast;
987 
988     ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
989     offsets[0] = 0;
990     ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
991     for (np = 2; np <= size; np++) {
992       offsets[np] += offsets[np-1];
993     }
994     nvGlobal = offsets[size];
995     ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
996     myFirst = offsets[rank];
997     myLast  = offsets[rank + 1] - 1;
998     ierr = PetscFree(offsets);CHKERRQ(ierr);
999     if (numVertices) {
1000       PetscInt firstPart = 0, firstLargePart = 0;
1001       PetscInt lastPart = 0, lastLargePart = 0;
1002       PetscInt rem = nvGlobal % nparts;
1003       PetscInt pSmall = nvGlobal/nparts;
1004       PetscInt pBig = nvGlobal/nparts + 1;
1005 
1006 
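      /* Vertices are laid out contiguously across the parts: the first rem parts receive pBig vertices, the
         remaining parts receive pSmall. Find the range of parts [firstPart, lastPart] that overlaps this
         rank's contiguous global vertex range [myFirst, myLast]. */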
1007       if (rem) {
1008         firstLargePart = myFirst / pBig;
1009         lastLargePart  = myLast  / pBig;
1010 
1011         if (firstLargePart < rem) {
1012           firstPart = firstLargePart;
1013         }
1014         else {
1015           firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
1016         }
1017         if (lastLargePart < rem) {
1018           lastPart = lastLargePart;
1019         }
1020         else {
1021           lastPart = rem + (myLast - (rem * pBig)) / pSmall;
1022         }
1023       }
1024       else {
1025         firstPart = myFirst / (nvGlobal/nparts);
1026         lastPart  = myLast  / (nvGlobal/nparts);
1027       }
1028 
1029       for (np = firstPart; np <= lastPart; np++) {
1030         PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
1031         PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);
1032 
1033         PartStart = PetscMax(PartStart,myFirst);
1034         PartEnd   = PetscMin(PartEnd,myLast+1);
1035         ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
1036       }
1037     }
1038   }
1039   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1040   PetscFunctionReturn(0);
1041 }
1042 
1043 static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1044 {
1045   PetscFunctionBegin;
1046   part->ops->view      = PetscPartitionerView_Simple;
1047   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1048   part->ops->partition = PetscPartitionerPartition_Simple;
1049   PetscFunctionReturn(0);
1050 }
1051 
1052 /*MC
1053   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1054 
1055   Level: intermediate
1056 
1057 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1058 M*/
1059 
1060 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1061 {
1062   PetscPartitioner_Simple *p;
1063   PetscErrorCode           ierr;
1064 
1065   PetscFunctionBegin;
1066   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1067   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1068   part->data = p;
1069 
1070   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1071   PetscFunctionReturn(0);
1072 }
1073 
1074 static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1075 {
1076   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1077   PetscErrorCode          ierr;
1078 
1079   PetscFunctionBegin;
1080   ierr = PetscFree(p);CHKERRQ(ierr);
1081   PetscFunctionReturn(0);
1082 }
1083 
1084 static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
1085 {
1086   PetscViewerFormat format;
1087   PetscErrorCode    ierr;
1088 
1089   PetscFunctionBegin;
1090   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1091   ierr = PetscViewerASCIIPrintf(viewer, "Gather Graph Partitioner:\n");CHKERRQ(ierr);
1092   PetscFunctionReturn(0);
1093 }
1094 
1095 static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1096 {
1097   PetscBool      iascii;
1098   PetscErrorCode ierr;
1099 
1100   PetscFunctionBegin;
1101   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1102   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1103   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1104   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1105   PetscFunctionReturn(0);
1106 }
1107 
1108 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1109 {
1110   PetscInt       np;
1111   PetscErrorCode ierr;
1112 
1113   PetscFunctionBegin;
1114   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1115   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1116   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1117   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1118   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1119   PetscFunctionReturn(0);
1120 }
1121 
1122 static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1123 {
1124   PetscFunctionBegin;
1125   part->ops->view      = PetscPartitionerView_Gather;
1126   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1127   part->ops->partition = PetscPartitionerPartition_Gather;
1128   PetscFunctionReturn(0);
1129 }
1130 
1131 /*MC
1132   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1133 
1134   Level: intermediate
1135 
1136 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1137 M*/
1138 
1139 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1140 {
1141   PetscPartitioner_Gather *p;
1142   PetscErrorCode           ierr;
1143 
1144   PetscFunctionBegin;
1145   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1146   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1147   part->data = p;
1148 
1149   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1150   PetscFunctionReturn(0);
1151 }
1152 
1153 
1154 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1155 {
1156   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1157   PetscErrorCode          ierr;
1158 
1159   PetscFunctionBegin;
1160   ierr = PetscFree(p);CHKERRQ(ierr);
1161   PetscFunctionReturn(0);
1162 }
1163 
1164 static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1165 {
1166   PetscViewerFormat format;
1167   PetscErrorCode    ierr;
1168 
1169   PetscFunctionBegin;
1170   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1171   ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
1172   PetscFunctionReturn(0);
1173 }
1174 
1175 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1176 {
1177   PetscBool      iascii;
1178   PetscErrorCode ierr;
1179 
1180   PetscFunctionBegin;
1181   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1182   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1183   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1184   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1185   PetscFunctionReturn(0);
1186 }
1187 
1188 #if defined(PETSC_HAVE_CHACO)
1189 #if defined(PETSC_HAVE_UNISTD_H)
1190 #include <unistd.h>
1191 #endif
1192 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1193 #include <chaco.h>
1194 #else
1195 /* Older versions of Chaco do not have an include file */
1196 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1197                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1198                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1199                        int mesh_dims[3], double *goal, int global_method, int local_method,
1200                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1201 #endif
1202 extern int FREE_GRAPH;
1203 #endif
1204 
1205 static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1206 {
1207 #if defined(PETSC_HAVE_CHACO)
1208   enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
1209   MPI_Comm       comm;
1210   int            nvtxs          = numVertices; /* number of vertices in full graph */
1211   int           *vwgts          = NULL;   /* weights for all vertices */
1212   float         *ewgts          = NULL;   /* weights for all edges */
1213   float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
1214   char          *outassignname  = NULL;   /*  name of assignment output file */
1215   char          *outfilename    = NULL;   /* output file name */
1216   int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
1217   int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
1218   int            mesh_dims[3];            /* dimensions of mesh of processors */
1219   double        *goal          = NULL;    /* desired set sizes for each set */
1220   int            global_method = 1;       /* global partitioning algorithm */
1221   int            local_method  = 1;       /* local partitioning algorithm */
1222   int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
1223   int            vmax          = 200;     /* how many vertices to coarsen down to? */
1224   int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
1225   double         eigtol        = 0.001;   /* tolerance on eigenvectors */
1226   long           seed          = 123636512; /* for random graph mutations */
1227 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1228   int           *assignment;              /* Output partition */
1229 #else
1230   short int     *assignment;              /* Output partition */
1231 #endif
1232   int            fd_stdout, fd_pipe[2];
1233   PetscInt      *points;
1234   int            i, v, p;
1235   PetscErrorCode ierr;
1236 
1237   PetscFunctionBegin;
1238   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1239 #if defined (PETSC_USE_DEBUG)
1240   {
1241     int ival,isum;
1242     PetscBool distributed;
1243 
1244     ival = (numVertices > 0);
1245     ierr = MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);CHKERRQ(ierr);
1246     distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
1247     if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
1248   }
1249 #endif
1250   if (!numVertices) {
1251     ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1252     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1253     ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1254     PetscFunctionReturn(0);
1255   }
1256   FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
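  /* Chaco expects 1-based vertex numbers in the adjacency list; shift up here and back down after the interface() call */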
1257   for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];
1258 
1259   if (global_method == INERTIAL_METHOD) {
1260     /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
1261     SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
1262   }
1263   mesh_dims[0] = nparts;
1264   mesh_dims[1] = 1;
1265   mesh_dims[2] = 1;
1266   ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
1267   /* Chaco outputs to stdout. We redirect this to a buffer. */
1268   /* TODO: check error codes for UNIX calls */
1269 #if defined(PETSC_HAVE_UNISTD_H)
1270   {
1271     int piperet;
1272     piperet = pipe(fd_pipe);
1273     if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
1274     fd_stdout = dup(1);
1275     close(1);
1276     dup2(fd_pipe[1], 1);
1277   }
1278 #endif
1279   ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
1280                    assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
1281                    vmax, ndims, eigtol, seed);
1282 #if defined(PETSC_HAVE_UNISTD_H)
1283   {
1284     char msgLog[10000];
1285     int  count;
1286 
1287     fflush(stdout);
1288     count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
1289     if (count < 0) count = 0;
1290     msgLog[count] = 0;
1291     close(1);
1292     dup2(fd_stdout, 1);
1293     close(fd_stdout);
1294     close(fd_pipe[0]);
1295     close(fd_pipe[1]);
1296     if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
1297   }
1298 #else
1299   if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
1300 #endif
1301   /* Convert to PetscSection+IS */
1302   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1303   for (v = 0; v < nvtxs; ++v) {
1304     ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
1305   }
1306   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1307   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1308   for (p = 0, i = 0; p < nparts; ++p) {
1309     for (v = 0; v < nvtxs; ++v) {
1310       if (assignment[v] == p) points[i++] = v;
1311     }
1312   }
1313   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1314   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1315   if (global_method == INERTIAL_METHOD) {
1316     /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
1317   }
1318   ierr = PetscFree(assignment);CHKERRQ(ierr);
1319   for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
1320   PetscFunctionReturn(0);
1321 #else
1322   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
1323 #endif
1324 }
1325 
1326 static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1327 {
1328   PetscFunctionBegin;
1329   part->ops->view      = PetscPartitionerView_Chaco;
1330   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1331   part->ops->partition = PetscPartitionerPartition_Chaco;
1332   PetscFunctionReturn(0);
1333 }
1334 
1335 /*MC
1336   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1337 
1338   Level: intermediate
1339 
1340 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1341 M*/
1342 
1343 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1344 {
1345   PetscPartitioner_Chaco *p;
1346   PetscErrorCode          ierr;
1347 
1348   PetscFunctionBegin;
1349   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1350   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1351   part->data = p;
1352 
1353   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1354   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1355   PetscFunctionReturn(0);
1356 }
1357 
1358 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1359 {
1360   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1361   PetscErrorCode             ierr;
1362 
1363   PetscFunctionBegin;
1364   ierr = PetscFree(p);CHKERRQ(ierr);
1365   PetscFunctionReturn(0);
1366 }
1367 
1368 static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1369 {
1370   PetscViewerFormat format;
1371   PetscErrorCode    ierr;
1372 
1373   PetscFunctionBegin;
1374   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1375   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
1376   PetscFunctionReturn(0);
1377 }
1378 
1379 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1380 {
1381   PetscBool      iascii;
1382   PetscErrorCode ierr;
1383 
1384   PetscFunctionBegin;
1385   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1386   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1387   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1388   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1389   PetscFunctionReturn(0);
1390 }
1391 
1392 #if defined(PETSC_HAVE_PARMETIS)
1393 #include <parmetis.h>
1394 #endif
1395 
1396 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1397 {
1398 #if defined(PETSC_HAVE_PARMETIS)
1399   MPI_Comm       comm;
1400   PetscSection   section;
1401   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1402   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1403   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1404   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1405   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1406   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1407   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1408   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1409   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1410   real_t        *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1411   real_t        *ubvec;                    /* The imbalance tolerance for each vertex weight */
1412   PetscInt       options[64];              /* Options */
1413   /* Outputs */
1414   PetscInt       edgeCut;                  /* The number of edges cut by the partition */
1415   PetscInt       v, i, *assignment, *points;
1416   PetscMPIInt    size, rank, p;
1417   PetscErrorCode ierr;
1418 
1419   PetscFunctionBegin;
1420   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1421   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1422   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1423   options[0] = 0; /* Use all defaults */
1424   /* Calculate vertex distribution */
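  /* vtxdist[p] holds the first global vertex number owned by process p; after the prefix sum below, vtxdist[size] is the global number of vertices */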
1425   ierr = PetscMalloc5(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
1426   vtxdist[0] = 0;
1427   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1428   for (p = 2; p <= size; ++p) {
1429     vtxdist[p] += vtxdist[p-1];
1430   }
1431   /* Calculate weights */
1432   for (p = 0; p < nparts; ++p) {
1433     tpwgts[p] = 1.0/nparts;
1434   }
1435   ubvec[0] = 1.05;
1436   /* Weight cells by dofs on cell by default */
1437   ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1438   if (section) {
1439     PetscInt cStart, cEnd, dof;
1440 
1441     ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
1442     for (v = cStart; v < cEnd; ++v) {
1443       ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1444       /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1445       /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1446       if (v-cStart < numVertices) vwgt[v-cStart] = PetscMax(dof, 1);
1447     }
1448   } else {
1449     for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1450   }
1451 
1452   if (nparts == 1) {
1453     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1454   } else {
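    /* If a single process owns every vertex, run serial METIS on that rank; otherwise call ParMETIS collectively */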
1455     for (p = 0; p < size && !vtxdist[p+1]; ++p);
1456     if (p == size || vtxdist[p+1] == vtxdist[size]) {
1457       if (rank == p) {
1458         PetscStackPush("METIS_PartGraphKway");
1459         ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
1460         PetscStackPop;
1461         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1462       }
1463     } else {
1464       PetscStackPush("ParMETIS_V3_PartKway");
1465       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1466       PetscStackPop;
1467       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
1468     }
1469   }
1470   /* Convert to PetscSection+IS */
1471   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1472   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1473   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1474   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1475   for (p = 0, i = 0; p < nparts; ++p) {
1476     for (v = 0; v < nvtxs; ++v) {
1477       if (assignment[v] == p) points[i++] = v;
1478     }
1479   }
1480   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1481   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1482   ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
1483   PetscFunctionReturn(0);
1484 #else
1485   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1486 #endif
1487 }
1488 
1489 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1490 {
1491   PetscFunctionBegin;
1492   part->ops->view      = PetscPartitionerView_ParMetis;
1493   part->ops->destroy   = PetscPartitionerDestroy_ParMetis;
1494   part->ops->partition = PetscPartitionerPartition_ParMetis;
1495   PetscFunctionReturn(0);
1496 }
1497 
1498 /*MC
1499   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1500 
1501   Level: intermediate
1502 
1503 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1504 M*/
1505 
1506 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1507 {
1508   PetscPartitioner_ParMetis *p;
1509   PetscErrorCode          ierr;
1510 
1511   PetscFunctionBegin;
1512   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1513   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1514   part->data = p;
1515 
1516   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1517   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1518   PetscFunctionReturn(0);
1519 }
1520 
1521 
1522 PetscBool PTScotchPartitionercite = PETSC_FALSE;
1523 const char PTScotchPartitionerCitation[] =
1524   "@article{PTSCOTCH,\n"
1525   "  author  = {C. Chevalier and F. Pellegrini},\n"
1526   "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
1527   "  journal = {Parallel Computing},\n"
1528   "  volume  = {34},\n"
1529   "  number  = {6},\n"
1530   "  pages   = {318--331},\n"
1531   "  year    = {2008},\n"
1532   "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
1533   "}\n";
1534 
1535 typedef struct {
1536   PetscInt  strategy;
1537   PetscReal imbalance;
1538 } PetscPartitioner_PTScotch;
1539 
1540 static const char *const
1541 PTScotchStrategyList[] = {
1542   "DEFAULT",
1543   "QUALITY",
1544   "SPEED",
1545   "BALANCE",
1546   "SAFETY",
1547   "SCALABILITY",
1548   "RECURSIVE",
1549   "REMAP"
1550 };
1551 
1552 #if defined(PETSC_HAVE_PTSCOTCH)
1553 
1554 EXTERN_C_BEGIN
1555 #include <ptscotch.h>
1556 EXTERN_C_END
1557 
1558 #define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0)
1559 
1560 static int PTScotch_Strategy(PetscInt strategy)
1561 {
1562   switch (strategy) {
1563   case  0: return SCOTCH_STRATDEFAULT;
1564   case  1: return SCOTCH_STRATQUALITY;
1565   case  2: return SCOTCH_STRATSPEED;
1566   case  3: return SCOTCH_STRATBALANCE;
1567   case  4: return SCOTCH_STRATSAFETY;
1568   case  5: return SCOTCH_STRATSCALABILITY;
1569   case  6: return SCOTCH_STRATRECURSIVE;
1570   case  7: return SCOTCH_STRATREMAP;
1571   default: return SCOTCH_STRATDEFAULT;
1572   }
1573 }
1574 
1575 
1576 static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1577                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[])
1578 {
1579   SCOTCH_Graph   grafdat;
1580   SCOTCH_Strat   stradat;
1581   SCOTCH_Num     vertnbr = n;
1582   SCOTCH_Num     edgenbr = xadj[n];
1583   SCOTCH_Num*    velotab = vtxwgt;
1584   SCOTCH_Num*    edlotab = adjwgt;
1585   SCOTCH_Num     flagval = strategy;
1586   double         kbalval = imbalance;
1587   PetscErrorCode ierr;
1588 
1589   PetscFunctionBegin;
1590   ierr = SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr);
1591   ierr = SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
1592   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1593   ierr = SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
1594 #if defined(PETSC_USE_DEBUG)
1595   ierr = SCOTCH_graphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1596 #endif
1597   ierr = SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
1598   SCOTCH_stratExit(&stradat);
1599   SCOTCH_graphExit(&grafdat);
1600   PetscFunctionReturn(0);
1601 }
1602 
1603 static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1604                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[], MPI_Comm comm)
1605 {
1606   PetscMPIInt     procglbnbr;
1607   PetscMPIInt     proclocnum;
1608   SCOTCH_Arch     archdat;
1609   SCOTCH_Dgraph   grafdat;
1610   SCOTCH_Dmapping mappdat;
1611   SCOTCH_Strat    stradat;
1612   SCOTCH_Num      vertlocnbr;
1613   SCOTCH_Num      edgelocnbr;
1614   SCOTCH_Num*     veloloctab = vtxwgt;
1615   SCOTCH_Num*     edloloctab = adjwgt;
1616   SCOTCH_Num      flagval = strategy;
1617   double          kbalval = imbalance;
1618   PetscErrorCode  ierr;
1619 
1620   PetscFunctionBegin;
1621   ierr = MPI_Comm_size(comm, &procglbnbr);CHKERRQ(ierr);
1622   ierr = MPI_Comm_rank(comm, &proclocnum);CHKERRQ(ierr);
1623   vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
1624   edgelocnbr = xadj[vertlocnbr];
1625 
1626   ierr = SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr);
1627   ierr = SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr);
1628 #if defined(PETSC_USE_DEBUG)
1629   ierr = SCOTCH_dgraphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1630 #endif
1631   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1632   ierr = SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRPTSCOTCH(ierr);
1633   ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
1634   ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
1635   ierr = SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr);
1636   ierr = SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr);
1637   SCOTCH_dgraphMapExit(&grafdat, &mappdat);
1638   SCOTCH_archExit(&archdat);
1639   SCOTCH_stratExit(&stradat);
1640   SCOTCH_dgraphExit(&grafdat);
1641   PetscFunctionReturn(0);
1642 }
1643 
1644 #endif /* PETSC_HAVE_PTSCOTCH */
1645 
1646 static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
1647 {
1648   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1649   PetscErrorCode             ierr;
1650 
1651   PetscFunctionBegin;
1652   ierr = PetscFree(p);CHKERRQ(ierr);
1653   PetscFunctionReturn(0);
1654 }
1655 
1656 static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer)
1657 {
1658   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1659   PetscErrorCode            ierr;
1660 
1661   PetscFunctionBegin;
1662   ierr = PetscViewerASCIIPrintf(viewer, "PTScotch Graph Partitioner:\n");CHKERRQ(ierr);
1663   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1664   ierr = PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);CHKERRQ(ierr);
1665   ierr = PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n",(double)p->imbalance);CHKERRQ(ierr);
1666   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1667   PetscFunctionReturn(0);
1668 }
1669 
1670 static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
1671 {
1672   PetscBool      iascii;
1673   PetscErrorCode ierr;
1674 
1675   PetscFunctionBegin;
1676   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1677   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1678   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1679   if (iascii) {ierr = PetscPartitionerView_PTScotch_Ascii(part, viewer);CHKERRQ(ierr);}
1680   PetscFunctionReturn(0);
1681 }
1682 
1683 static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1684 {
1685   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1686   const char *const         *slist = PTScotchStrategyList;
1687   PetscInt                  nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0]));
1688   PetscBool                 flag;
1689   PetscErrorCode            ierr;
1690 
1691   PetscFunctionBegin;
1692   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitionerPTScotch Options");CHKERRQ(ierr);
1693   ierr = PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);CHKERRQ(ierr);
1694   ierr = PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);CHKERRQ(ierr);
1695   ierr = PetscOptionsTail();CHKERRQ(ierr);
1696   PetscFunctionReturn(0);
1697 }
1698 
1699 static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1700 {
1701 #if defined(PETSC_HAVE_PTSCOTCH)
1702   MPI_Comm       comm       = PetscObjectComm((PetscObject)part);
1703   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1704   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1705   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1706   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1707   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1708   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1709   PetscInt       v, i, *assignment, *points;
1710   PetscMPIInt    size, rank, p;
1711   PetscErrorCode ierr;
1712 
1713   PetscFunctionBegin;
1714   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1715   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1716   ierr = PetscMalloc2(size+1,&vtxdist,PetscMax(nvtxs,1),&assignment);CHKERRQ(ierr);
1717 
1718   /* Calculate vertex distribution */
1719   vtxdist[0] = 0;
1720   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1721   for (p = 2; p <= size; ++p) {
1722     vtxdist[p] += vtxdist[p-1];
1723   }
1724 
1725   if (nparts == 1) {
1726     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1727   } else {
1728     PetscSection section;
1729     /* Weight cells by dofs on cell by default */
1730     ierr = PetscMalloc1(PetscMax(nvtxs,1),&vwgt);CHKERRQ(ierr);
1731     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1732     if (section) {
1733       PetscInt vStart, vEnd, dof;
1734       ierr = DMPlexGetHeightStratum(dm, 0, &vStart, &vEnd);CHKERRQ(ierr);
1735       for (v = vStart; v < vEnd; ++v) {
1736         ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1737         /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1738         /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1739         if (v-vStart < numVertices) vwgt[v-vStart] = PetscMax(dof, 1);
1740       }
1741     } else {
1742       for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1743     }
1744     {
1745       PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
1746       int                       strat = PTScotch_Strategy(pts->strategy);
1747       double                    imbal = (double)pts->imbalance;
1748 
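      /* If a single process owns every vertex, partition sequentially on that rank; otherwise use the parallel PT-Scotch interface */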
1749       for (p = 0; p < size && !vtxdist[p+1]; ++p);
1750       if (p == size || vtxdist[p+1] == vtxdist[size]) {
1751         if (rank == p) {
1752           ierr = PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, assignment);CHKERRQ(ierr);
1753         }
1754       } else {
1755         ierr = PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, assignment, comm);CHKERRQ(ierr);
1756       }
1757     }
1758     ierr = PetscFree(vwgt);CHKERRQ(ierr);
1759   }
1760 
1761   /* Convert to PetscSection+IS */
1762   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1763   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1764   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1765   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1766   for (p = 0, i = 0; p < nparts; ++p) {
1767     for (v = 0; v < nvtxs; ++v) {
1768       if (assignment[v] == p) points[i++] = v;
1769     }
1770   }
1771   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1772   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1773 
1774   ierr = PetscFree2(vtxdist,assignment);CHKERRQ(ierr);
1775   PetscFunctionReturn(0);
1776 #else
1777   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
1778 #endif
1779 }
1780 
1781 static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
1782 {
1783   PetscFunctionBegin;
1784   part->ops->view           = PetscPartitionerView_PTScotch;
1785   part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
1786   part->ops->partition      = PetscPartitionerPartition_PTScotch;
1787   part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
1788   PetscFunctionReturn(0);
1789 }
1790 
1791 /*MC
1792   PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library
1793 
1794   Level: intermediate
1795 
1796 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1797 M*/
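
/*
   A short sketch of typical runtime options for this partitioner; the option names
   are those registered in PetscPartitionerSetFromOptions_PTScotch() above:

     -petscpartitioner_type ptscotch
     -petscpartitioner_ptscotch_strategy QUALITY
     -petscpartitioner_ptscotch_imbalance 0.05
*/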
1798 
1799 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
1800 {
1801   PetscPartitioner_PTScotch *p;
1802   PetscErrorCode          ierr;
1803 
1804   PetscFunctionBegin;
1805   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1806   ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
1807   part->data = p;
1808 
1809   p->strategy  = 0;
1810   p->imbalance = 0.01;
1811 
1812   ierr = PetscPartitionerInitialize_PTScotch(part);CHKERRQ(ierr);
1813   ierr = PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);CHKERRQ(ierr);
1814   PetscFunctionReturn(0);
1815 }
1816 
1817 
1818 /*@
1819   DMPlexGetPartitioner - Get the mesh partitioner
1820 
1821   Not collective
1822 
1823   Input Parameter:
1824 . dm - The DM
1825 
1826   Output Parameter:
1827 . part - The PetscPartitioner
1828 
1829   Level: developer
1830 
1831   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1832 
1833 .seealso: DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1834 @*/
1835 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1836 {
1837   DM_Plex       *mesh = (DM_Plex *) dm->data;
1838 
1839   PetscFunctionBegin;
1840   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1841   PetscValidPointer(part, 2);
1842   *part = mesh->partitioner;
1843   PetscFunctionReturn(0);
1844 }
1845 
1846 /*@
1847   DMPlexSetPartitioner - Set the mesh partitioner
1848 
1849   Logically collective on dm and part
1850 
1851   Input Parameters:
1852 + dm - The DM
1853 - part - The partitioner
1854 
1855   Level: developer
1856 
1857   Note: Any existing PetscPartitioner will be destroyed.
1858 
1859 .seealso: DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1860 @*/
1861 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1862 {
1863   DM_Plex       *mesh = (DM_Plex *) dm->data;
1864   PetscErrorCode ierr;
1865 
1866   PetscFunctionBegin;
1867   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1868   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1869   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1870   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1871   mesh->partitioner = part;
1872   PetscFunctionReturn(0);
1873 }
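
/*
   A minimal usage sketch (not part of the library source): replacing the mesh
   partitioner by hand before distribution. DMPlexSetPartitioner() takes its own
   reference, so the caller may destroy the partitioner afterwards.

     PetscPartitioner part;
     PetscErrorCode   ierr;

     ierr = PetscPartitionerCreate(PetscObjectComm((PetscObject) dm), &part);CHKERRQ(ierr);
     ierr = PetscPartitionerSetType(part, PETSCPARTITIONERPARMETIS);CHKERRQ(ierr);
     ierr = DMPlexSetPartitioner(dm, part);CHKERRQ(ierr);
     ierr = PetscPartitionerDestroy(&part);CHKERRQ(ierr);
*/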
1874 
1875 static PetscErrorCode DMPlexPartitionLabelClosure_Tree(DM dm, DMLabel label, PetscInt rank, PetscInt point, PetscBool up, PetscBool down)
1876 {
1877   PetscErrorCode ierr;
1878 
1879   PetscFunctionBegin;
1880   if (up) {
1881     PetscInt parent;
1882 
1883     ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
1884     if (parent != point) {
1885       PetscInt closureSize, *closure = NULL, i;
1886 
1887       ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
1888       for (i = 0; i < closureSize; i++) {
1889         PetscInt cpoint = closure[2*i];
1890 
1891         ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
1892         ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
1893       }
1894       ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
1895     }
1896   }
1897   if (down) {
1898     PetscInt numChildren;
1899     const PetscInt *children;
1900 
1901     ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
1902     if (numChildren) {
1903       PetscInt i;
1904 
1905       for (i = 0; i < numChildren; i++) {
1906         PetscInt cpoint = children[i];
1907 
1908         ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
1909         ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
1910       }
1911     }
1912   }
1913   PetscFunctionReturn(0);
1914 }
1915 
1916 /*@
1917   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
1918 
1919   Input Parameters:
1920 + dm     - The DM
1921 - label  - DMLabel assigning ranks to remote roots
1922 
1923   Level: developer
1924 
1925 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
1926 @*/
1927 PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
1928 {
1929   IS              rankIS,   pointIS;
1930   const PetscInt *ranks,   *points;
1931   PetscInt        numRanks, numPoints, r, p, c, closureSize;
1932   PetscInt       *closure = NULL;
1933   PetscErrorCode  ierr;
1934 
1935   PetscFunctionBegin;
1936   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1937   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1938   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1939   for (r = 0; r < numRanks; ++r) {
1940     const PetscInt rank = ranks[r];
1941 
1942     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1943     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1944     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1945     for (p = 0; p < numPoints; ++p) {
1946       ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
1947       for (c = 0; c < closureSize*2; c += 2) {
1948         ierr = DMLabelSetValue(label, closure[c], rank);CHKERRQ(ierr);
1949         ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,closure[c],PETSC_TRUE,PETSC_TRUE);CHKERRQ(ierr);
1950       }
1951     }
1952     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1953     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1954   }
1955   if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);}
1956   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1957   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1958   PetscFunctionReturn(0);
1959 }
1960 
1961 /*@
1962   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
1963 
1964   Input Parameters:
1965 + dm     - The DM
1966 - label  - DMLabel assigning ranks to remote roots
1967 
1968   Level: developer
1969 
1970 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
1971 @*/
1972 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
1973 {
1974   IS              rankIS,   pointIS;
1975   const PetscInt *ranks,   *points;
1976   PetscInt        numRanks, numPoints, r, p, a, adjSize;
1977   PetscInt       *adj = NULL;
1978   PetscErrorCode  ierr;
1979 
1980   PetscFunctionBegin;
1981   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1982   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1983   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1984   for (r = 0; r < numRanks; ++r) {
1985     const PetscInt rank = ranks[r];
1986 
1987     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1988     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1989     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1990     for (p = 0; p < numPoints; ++p) {
1991       adjSize = PETSC_DETERMINE;
1992       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
1993       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
1994     }
1995     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1996     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1997   }
1998   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1999   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2000   ierr = PetscFree(adj);CHKERRQ(ierr);
2001   PetscFunctionReturn(0);
2002 }
2003 
2004 /*@
2005   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
2006 
2007   Input Parameters:
2008 + dm     - The DM
2009 - label  - DMLabel assigning ranks to remote roots
2010 
2011   Level: developer
2012 
2013   Note: This is required when generating multi-level overlaps to capture
2014   overlap points from non-neighbouring partitions.
2015 
2016 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2017 @*/
2018 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
2019 {
2020   MPI_Comm        comm;
2021   PetscMPIInt     rank;
2022   PetscSF         sfPoint;
2023   DMLabel         lblRoots, lblLeaves;
2024   IS              rankIS, pointIS;
2025   const PetscInt *ranks;
2026   PetscInt        numRanks, r;
2027   PetscErrorCode  ierr;
2028 
2029   PetscFunctionBegin;
2030   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2031   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2032   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
2033   /* Pull point contributions from remote leaves into local roots */
2034   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
2035   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
2036   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2037   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2038   for (r = 0; r < numRanks; ++r) {
2039     const PetscInt remoteRank = ranks[r];
2040     if (remoteRank == rank) continue;
2041     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
2042     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2043     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2044   }
2045   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2046   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2047   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
2048   /* Push point contributions from roots into remote leaves */
2049   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
2050   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
2051   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2052   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2053   for (r = 0; r < numRanks; ++r) {
2054     const PetscInt remoteRank = ranks[r];
2055     if (remoteRank == rank) continue;
2056     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
2057     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2058     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2059   }
2060   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2061   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2062   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
2063   PetscFunctionReturn(0);
2064 }
2065 
2066 /*@
2067   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
2068 
2069   Input Parameters:
2070 + dm        - The DM
2071 . rootLabel - DMLabel assigning ranks to local roots
2072 - processSF - A star forest mapping into the local index on each remote rank
2073 
2074   Output Parameter:
2075 . leafLabel - DMLabel assigning ranks to remote roots
2076 
2077   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
2078   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
2079 
2080   Level: developer
2081 
2082 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2083 @*/
2084 PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
2085 {
2086   MPI_Comm           comm;
2087   PetscMPIInt        rank, size;
2088   PetscInt           p, n, numNeighbors, ssize, l, nleaves;
2089   PetscSF            sfPoint;
2090   PetscSFNode       *rootPoints, *leafPoints;
2091   PetscSection       rootSection, leafSection;
2092   const PetscSFNode *remote;
2093   const PetscInt    *local, *neighbors;
2094   IS                 valueIS;
2095   PetscErrorCode     ierr;
2096 
2097   PetscFunctionBegin;
2098   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2099   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2100   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2101   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
2102 
2103   /* Convert to (point, rank) and use actual owners */
2104   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
2105   ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
2106   ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
2107   ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
2108   ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
2109   for (n = 0; n < numNeighbors; ++n) {
2110     PetscInt numPoints;
2111 
2112     ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
2113     ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
2114   }
2115   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
2116   ierr = PetscSectionGetStorageSize(rootSection, &ssize);CHKERRQ(ierr);
2117   ierr = PetscMalloc1(ssize, &rootPoints);CHKERRQ(ierr);
2118   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
2119   for (n = 0; n < numNeighbors; ++n) {
2120     IS              pointIS;
2121     const PetscInt *points;
2122     PetscInt        off, numPoints, p;
2123 
2124     ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
2125     ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
2126     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2127     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2128     for (p = 0; p < numPoints; ++p) {
2129       if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
2130       else       {l = -1;}
2131       if (l >= 0) {rootPoints[off+p] = remote[l];}
2132       else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
2133     }
2134     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2135     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2136   }
2137   ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
2138   ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
2139   /* Communicate overlap */
2140   ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
2141   ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
2142   /* Insert the received (point, rank) pairs into the leaf label */
2143   ierr = PetscSectionGetStorageSize(leafSection, &ssize);CHKERRQ(ierr);
2144   for (p = 0; p < ssize; p++) {
2145     ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
2146   }
2147   ierr = PetscFree(rootPoints);CHKERRQ(ierr);
2148   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
2149   ierr = PetscFree(leafPoints);CHKERRQ(ierr);
2150   ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
2151   PetscFunctionReturn(0);
2152 }
2153 
2154 /*@
2155   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
2156 
2157   Input Parameters:
2158 + dm    - The DM
2159 - label - DMLabel assigning ranks to remote roots
2160 
2161   Output Parameter:
2162 . sf    - The star forest communication context encapsulating the defined mapping
2163 
2164   Note: The incoming label is a receiver mapping of remote points to their parent rank.
2165 
2166   Level: developer
2167 
2168 .seealso: DMPlexDistribute(), DMPlexCreateOverlap()
2169 @*/
2170 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
2171 {
2172   PetscMPIInt     rank, size;
2173   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
2174   PetscSFNode    *remotePoints;
2175   IS              remoteRootIS;
2176   const PetscInt *remoteRoots;
2177   PetscErrorCode ierr;
2178 
2179   PetscFunctionBegin;
2180   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
2181   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
2182 
2183   for (numRemote = 0, n = 0; n < size; ++n) {
2184     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
2185     numRemote += numPoints;
2186   }
2187   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
2188   /* Put owned points first */
2189   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
2190   if (numPoints > 0) {
2191     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
2192     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2193     for (p = 0; p < numPoints; p++) {
2194       remotePoints[idx].index = remoteRoots[p];
2195       remotePoints[idx].rank = rank;
2196       idx++;
2197     }
2198     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2199     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
2200   }
2201   /* Now add remote points */
2202   for (n = 0; n < size; ++n) {
2203     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
2204     if (numPoints <= 0 || n == rank) continue;
2205     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
2206     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2207     for (p = 0; p < numPoints; p++) {
2208       remotePoints[idx].index = remoteRoots[p];
2209       remotePoints[idx].rank = n;
2210       idx++;
2211     }
2212     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2213     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
2214   }
2215   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
2216   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
2217   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
2218   PetscFunctionReturn(0);
2219 }
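
/*
   A rough sketch of how these partition-label helpers are typically chained
   (see DMPlexDistribute() and DMPlexCreateOverlap() for the actual usage):
   starting from a label that assigns a target rank to each locally owned point,
   DMPlexPartitionLabelClosure() completes it with the transitive closure,
   DMPlexPartitionLabelInvert() turns that sender mapping into a receiver label
   of remote roots, and DMPlexPartitionLabelCreateSF() converts the receiver
   label into the PetscSF used to migrate the mesh points.
*/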
2220