xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 97a9ccd30fd95168d2009ddcb3942b90612f60fa)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
/* Class id for PetscPartitioner objects; 0 until the class is registered (registration site not visible in this file) */
PetscClassId PETSCPARTITIONER_CLASSID = 0;

/* Registry mapping partitioner type names to creation routines; filled by PetscPartitionerRegister() */
PetscFunctionList PetscPartitionerList              = NULL;
/* Guard flag consulted so registration of built-in implementations runs only once */
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
7 
8 PetscBool ChacoPartitionercite = PETSC_FALSE;
9 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
10                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
11                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
12                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
13                                "  isbn      = {0-89791-816-9},\n"
14                                "  pages     = {28},\n"
15                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
16                                "  publisher = {ACM Press},\n"
17                                "  address   = {New York},\n"
18                                "  year      = {1995}\n}\n";
19 
PetscBool ParMetisPartitionercite = PETSC_FALSE;
/* BibTeX entry cited when the ParMetis partitioner is used (presumably passed to
   PetscCitationsRegister() at the use site -- confirm) */
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";
29 /*@C
30   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
31 
  Input Parameters:
+ dm      - The mesh DM
- height  - Height of the strata from which to construct the graph

  Output Parameters:
+ numVertices     - Number of vertices in the graph
. offsets         - Point offsets in the graph
. adjacency       - Point connectivity in the graph
- globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.

  The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
  DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
  representation on the mesh.
45 
46   Level: developer
47 
48 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
49 @*/
50 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
51 {
52   PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
53   PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
54   IS             cellNumbering;
55   const PetscInt *cellNum;
56   PetscBool      useCone, useClosure;
57   PetscSection   section;
58   PetscSegBuffer adjBuffer;
59   PetscSF        sfPoint;
60   PetscMPIInt    rank;
61   PetscErrorCode ierr;
62 
63   PetscFunctionBegin;
64   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
65   ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
66   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
67   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
68   /* Build adjacency graph via a section/segbuffer */
69   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
70   ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
71   ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
72   /* Always use FVM adjacency to create partitioner graph */
73   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
74   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
75   ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
76   ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
77   ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);CHKERRQ(ierr);
78   if (globalNumbering) {
79     ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
80     *globalNumbering = cellNumbering;
81   }
82   ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
83   for (*numVertices = 0, p = pStart; p < pEnd; p++) {
84     /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
85     if (nroots > 0) {if (cellNum[p] < 0) continue;}
86     adjSize = PETSC_DETERMINE;
87     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
88     for (a = 0; a < adjSize; ++a) {
89       const PetscInt point = adj[a];
90       if (point != p && pStart <= point && point < pEnd) {
91         PetscInt *PETSC_RESTRICT pBuf;
92         ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
93         ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
94         *pBuf = point;
95       }
96     }
97     (*numVertices)++;
98   }
99   ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
100   ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
101   /* Derive CSR graph from section/segbuffer */
102   ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
103   ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
104   ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
105   for (idx = 0, p = pStart; p < pEnd; p++) {
106     if (nroots > 0) {if (cellNum[p] < 0) continue;}
107     ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
108   }
109   vOffsets[*numVertices] = size;
110   if (offsets) *offsets = vOffsets;
111   ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
112   {
113     ISLocalToGlobalMapping ltogCells;
114     PetscInt n, size, *cells_arr;
115     /* In parallel, apply a global cell numbering to the graph */
116     ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
117     ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
118     ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
119     ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
120     /* Convert to positive global cell numbers */
121     for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
122     ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
123     ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);CHKERRQ(ierr);
124     ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
125     ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
126   }
127   if (adjacency) *adjacency = graph;
128   /* Clean up */
129   ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
130   ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
131   ierr = PetscFree(adj);CHKERRQ(ierr);
132   PetscFunctionReturn(0);
133 }
134 
135 /*@C
136   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
137 
138   Collective
139 
140   Input Arguments:
141 + dm - The DMPlex
142 - cellHeight - The height of mesh points to treat as cells (default should be 0)
143 
144   Output Arguments:
145 + numVertices - The number of local vertices in the graph, or cells in the mesh.
146 . offsets     - The offset to the adjacency list for each cell
147 - adjacency   - The adjacency list for all cells
148 
149   Note: This is suitable for input to a mesh partitioner like ParMetis.
150 
151   Level: advanced
152 
153 .seealso: DMPlexCreate()
154 @*/
155 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
156 {
157   const PetscInt maxFaceCases = 30;
158   PetscInt       numFaceCases = 0;
159   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
160   PetscInt      *off, *adj;
161   PetscInt      *neighborCells = NULL;
162   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
163   PetscErrorCode ierr;
164 
165   PetscFunctionBegin;
166   /* For parallel partitioning, I think you have to communicate supports */
167   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
168   cellDim = dim - cellHeight;
169   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
170   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
171   if (cEnd - cStart == 0) {
172     if (numVertices) *numVertices = 0;
173     if (offsets)   *offsets   = NULL;
174     if (adjacency) *adjacency = NULL;
175     PetscFunctionReturn(0);
176   }
177   numCells  = cEnd - cStart;
178   faceDepth = depth - cellHeight;
179   if (dim == depth) {
180     PetscInt f, fStart, fEnd;
181 
182     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
183     /* Count neighboring cells */
184     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
185     for (f = fStart; f < fEnd; ++f) {
186       const PetscInt *support;
187       PetscInt        supportSize;
188       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
189       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
190       if (supportSize == 2) {
191         PetscInt numChildren;
192 
193         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
194         if (!numChildren) {
195           ++off[support[0]-cStart+1];
196           ++off[support[1]-cStart+1];
197         }
198       }
199     }
200     /* Prefix sum */
201     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
202     if (adjacency) {
203       PetscInt *tmp;
204 
205       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
206       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
207       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
208       /* Get neighboring cells */
209       for (f = fStart; f < fEnd; ++f) {
210         const PetscInt *support;
211         PetscInt        supportSize;
212         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
213         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
214         if (supportSize == 2) {
215           PetscInt numChildren;
216 
217           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
218           if (!numChildren) {
219             adj[tmp[support[0]-cStart]++] = support[1];
220             adj[tmp[support[1]-cStart]++] = support[0];
221           }
222         }
223       }
224 #if defined(PETSC_USE_DEBUG)
225       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
226 #endif
227       ierr = PetscFree(tmp);CHKERRQ(ierr);
228     }
229     if (numVertices) *numVertices = numCells;
230     if (offsets)   *offsets   = off;
231     if (adjacency) *adjacency = adj;
232     PetscFunctionReturn(0);
233   }
234   /* Setup face recognition */
235   if (faceDepth == 1) {
236     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
237 
238     for (c = cStart; c < cEnd; ++c) {
239       PetscInt corners;
240 
241       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
242       if (!cornersSeen[corners]) {
243         PetscInt nFV;
244 
245         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
246         cornersSeen[corners] = 1;
247 
248         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
249 
250         numFaceVertices[numFaceCases++] = nFV;
251       }
252     }
253   }
254   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
255   /* Count neighboring cells */
256   for (cell = cStart; cell < cEnd; ++cell) {
257     PetscInt numNeighbors = PETSC_DETERMINE, n;
258 
259     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
260     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
261     for (n = 0; n < numNeighbors; ++n) {
262       PetscInt        cellPair[2];
263       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
264       PetscInt        meetSize = 0;
265       const PetscInt *meet    = NULL;
266 
267       cellPair[0] = cell; cellPair[1] = neighborCells[n];
268       if (cellPair[0] == cellPair[1]) continue;
269       if (!found) {
270         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
271         if (meetSize) {
272           PetscInt f;
273 
274           for (f = 0; f < numFaceCases; ++f) {
275             if (numFaceVertices[f] == meetSize) {
276               found = PETSC_TRUE;
277               break;
278             }
279           }
280         }
281         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
282       }
283       if (found) ++off[cell-cStart+1];
284     }
285   }
286   /* Prefix sum */
287   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
288 
289   if (adjacency) {
290     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
291     /* Get neighboring cells */
292     for (cell = cStart; cell < cEnd; ++cell) {
293       PetscInt numNeighbors = PETSC_DETERMINE, n;
294       PetscInt cellOffset   = 0;
295 
296       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
297       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
298       for (n = 0; n < numNeighbors; ++n) {
299         PetscInt        cellPair[2];
300         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
301         PetscInt        meetSize = 0;
302         const PetscInt *meet    = NULL;
303 
304         cellPair[0] = cell; cellPair[1] = neighborCells[n];
305         if (cellPair[0] == cellPair[1]) continue;
306         if (!found) {
307           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
308           if (meetSize) {
309             PetscInt f;
310 
311             for (f = 0; f < numFaceCases; ++f) {
312               if (numFaceVertices[f] == meetSize) {
313                 found = PETSC_TRUE;
314                 break;
315               }
316             }
317           }
318           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
319         }
320         if (found) {
321           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
322           ++cellOffset;
323         }
324       }
325     }
326   }
327   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
328   if (numVertices) *numVertices = numCells;
329   if (offsets)   *offsets   = off;
330   if (adjacency) *adjacency = adj;
331   PetscFunctionReturn(0);
332 }
333 
334 /*@C
335   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
336 
337   Not Collective
338 
339   Input Parameters:
340 + name        - The name of a new user-defined creation routine
341 - create_func - The creation routine itself
342 
343   Notes:
344   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
345 
346   Sample usage:
347 .vb
348     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
349 .ve
350 
351   Then, your PetscPartitioner type can be chosen with the procedural interface via
352 .vb
353     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
354     PetscPartitionerSetType(PetscPartitioner, "my_part");
355 .ve
356    or at runtime via the option
357 .vb
358     -petscpartitioner_type my_part
359 .ve
360 
361   Level: advanced
362 
363 .keywords: PetscPartitioner, register
364 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
365 
366 @*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Add the creation routine to the global registry consulted by PetscPartitionerSetType() */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
375 
376 /*@C
377   PetscPartitionerSetType - Builds a particular PetscPartitioner
378 
379   Collective on PetscPartitioner
380 
381   Input Parameters:
382 + part - The PetscPartitioner object
383 - name - The kind of partitioner
384 
385   Options Database Key:
386 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
387 
388   Level: intermediate
389 
390 .keywords: PetscPartitioner, set, type
391 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
392 @*/
393 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
394 {
395   PetscErrorCode (*r)(PetscPartitioner);
396   PetscBool      match;
397   PetscErrorCode ierr;
398 
399   PetscFunctionBegin;
400   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
401   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
402   if (match) PetscFunctionReturn(0);
403 
404   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
405   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
406   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
407 
408   if (part->ops->destroy) {
409     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
410     part->ops->destroy = NULL;
411   }
412   ierr = (*r)(part);CHKERRQ(ierr);
413   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
414   PetscFunctionReturn(0);
415 }
416 
417 /*@C
418   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
419 
420   Not Collective
421 
422   Input Parameter:
423 . part - The PetscPartitioner
424 
425   Output Parameter:
426 . name - The PetscPartitioner type name
427 
428   Level: intermediate
429 
430 .keywords: PetscPartitioner, get, type, name
431 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
432 @*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  /* Ensure the registry of implementations exists before reporting a type name */
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  /* The returned string is owned by the object; callers must not free it */
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
444 
445 /*@C
446   PetscPartitionerView - Views a PetscPartitioner
447 
448   Collective on PetscPartitioner
449 
  Input Parameters:
+ part - the PetscPartitioner object to view
- v    - the viewer
453 
454   Level: developer
455 
456 .seealso: PetscPartitionerDestroy()
457 @*/
PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Default to the communicator's stdout ASCII viewer when none is supplied */
  if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
  /* Delegate to the type-specific view routine, if the implementation provides one */
  if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
468 
469 PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
470 {
471   const char    *defaultType;
472   char           name[256];
473   PetscBool      flg;
474   PetscErrorCode ierr;
475 
476   PetscFunctionBegin;
477   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
478   if (!((PetscObject) part)->type_name)
479 #if defined(PETSC_HAVE_CHACO)
480     defaultType = PETSCPARTITIONERCHACO;
481 #elif defined(PETSC_HAVE_PARMETIS)
482     defaultType = PETSCPARTITIONERPARMETIS;
483 #else
484     defaultType = PETSCPARTITIONERSIMPLE;
485 #endif
486   else
487     defaultType = ((PetscObject) part)->type_name;
488   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
489 
490   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
491   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);CHKERRQ(ierr);
492   if (flg) {
493     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
494   } else if (!((PetscObject) part)->type_name) {
495     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
496   }
497   ierr = PetscOptionsEnd();CHKERRQ(ierr);
498   PetscFunctionReturn(0);
499 }
500 
501 /*@
502   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
503 
504   Collective on PetscPartitioner
505 
506   Input Parameter:
507 . part - the PetscPartitioner object to set options for
508 
509   Level: developer
510 
511 .seealso: PetscPartitionerView()
512 @*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Resolve the type first: the type-specific setfromoptions hook below depends on it */
  ierr = PetscPartitionerSetTypeFromOptions_Internal(part);CHKERRQ(ierr);

  /* PetscObjectOptionsBegin declares the PetscOptionsObject used in this scope */
  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  if (part->ops->setfromoptions) {ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);}
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
529 
530 /*@C
531   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
532 
533   Collective on PetscPartitioner
534 
535   Input Parameter:
536 . part - the PetscPartitioner object to setup
537 
538   Level: developer
539 
540 .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
541 @*/
PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Delegate to the type-specific setup routine, if the implementation provides one */
  if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
551 
552 /*@
553   PetscPartitionerDestroy - Destroys a PetscPartitioner object
554 
555   Collective on PetscPartitioner
556 
557   Input Parameter:
558 . part - the PetscPartitioner object to destroy
559 
560   Level: developer
561 
562 .seealso: PetscPartitionerView()
563 @*/
564 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
565 {
566   PetscErrorCode ierr;
567 
568   PetscFunctionBegin;
569   if (!*part) PetscFunctionReturn(0);
570   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
571 
572   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
573   ((PetscObject) (*part))->refct = 0;
574 
575   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
576   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
577   PetscFunctionReturn(0);
578 }
579 
580 /*@
581   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
582 
583   Collective on MPI_Comm
584 
585   Input Parameter:
586 . comm - The communicator for the PetscPartitioner object
587 
588   Output Parameter:
589 . part - The PetscPartitioner object
590 
591   Level: beginner
592 
593 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
594 @*/
PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
{
  PetscPartitioner p;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidPointer(part, 2);
  *part = NULL;
  /* Ensure the DM package is initialized before creating the header
     (presumably registers PETSCPARTITIONER_CLASSID -- registration not visible here) */
  ierr = DMInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);

  *part = p;
  PetscFunctionReturn(0);
}
610 
611 /*@
612   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
613 
614   Collective on DM
615 
616   Input Parameters:
617 + part    - The PetscPartitioner
618 - dm      - The mesh DM
619 
620   Output Parameters:
621 + partSection     - The PetscSection giving the division of points by partition
622 - partition       - The list of points by partition
623 
624   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
625 
626   Level: developer
627 
.seealso: DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
629 @*/
630 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
631 {
632   PetscMPIInt    size;
633   PetscErrorCode ierr;
634 
635   PetscFunctionBegin;
636   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
637   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
638   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
639   PetscValidPointer(partition, 5);
640   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
641   if (size == 1) {
642     PetscInt *points;
643     PetscInt  cStart, cEnd, c;
644 
645     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
646     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
647     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
648     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
649     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
650     for (c = cStart; c < cEnd; ++c) points[c] = c;
651     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
652     PetscFunctionReturn(0);
653   }
654   if (part->height == 0) {
655     PetscInt numVertices;
656     PetscInt *start     = NULL;
657     PetscInt *adjacency = NULL;
658     IS       globalNumbering;
659 
660     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
661     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
662     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
663     ierr = PetscFree(start);CHKERRQ(ierr);
664     ierr = PetscFree(adjacency);CHKERRQ(ierr);
665     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
666       const PetscInt *globalNum;
667       const PetscInt *partIdx;
668       PetscInt *map, cStart, cEnd;
669       PetscInt *adjusted, i, localSize, offset;
670       IS    newPartition;
671 
672       ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
673       ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
674       ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
675       ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
676       ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
677       ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
678       for (i = cStart, offset = 0; i < cEnd; i++) {
679         if (globalNum[i - cStart] >= 0) map[offset++] = i;
680       }
681       for (i = 0; i < localSize; i++) {
682         adjusted[i] = map[partIdx[i]];
683       }
684       ierr = PetscFree(map);CHKERRQ(ierr);
685       ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
686       ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
687       ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
688       ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
689       ierr = ISDestroy(partition);CHKERRQ(ierr);
690       *partition = newPartition;
691     }
692   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
693   PetscFunctionReturn(0);
694 
695 }
696 
697 static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
698 {
699   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
700   PetscErrorCode          ierr;
701 
702   PetscFunctionBegin;
703   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
704   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
705   ierr = PetscFree(p);CHKERRQ(ierr);
706   PetscFunctionReturn(0);
707 }
708 
709 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
710 {
711   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
712   PetscViewerFormat       format;
713   PetscErrorCode          ierr;
714 
715   PetscFunctionBegin;
716   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
717   ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
718   if (p->random) {
719     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
720     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
721     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
722   }
723   PetscFunctionReturn(0);
724 }
725 
726 static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
727 {
728   PetscBool      iascii;
729   PetscErrorCode ierr;
730 
731   PetscFunctionBegin;
732   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
733   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
734   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
735   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
736   PetscFunctionReturn(0);
737 }
738 
static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitionerShell Options");CHKERRQ(ierr);
  /* -petscpartitioner_shell_random: generate a random partition instead of requiring
     the user to supply one via PetscPartitionerShellSetPartition() */
  ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
750 
751 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
752 {
753   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
754   PetscInt                np;
755   PetscErrorCode          ierr;
756 
757   PetscFunctionBegin;
758   if (p->random) {
759     PetscRandom r;
760     PetscInt   *sizes, *points, v;
761 
762     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
763     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (nparts+1));CHKERRQ(ierr);
764     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
765     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
766     for (v = 0; v < numVertices; ++v) {
767       PetscReal val;
768       PetscInt  part;
769 
770       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
771       part = PetscFloorReal(val);
772       points[v] = part;
773       ++sizes[part];
774     }
775     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
776     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
777     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
778   }
779   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
780   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
781   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
782   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
783   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
784   *partition = p->partition;
785   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
786   PetscFunctionReturn(0);
787 }
788 
789 static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
790 {
791   PetscFunctionBegin;
792   part->ops->view           = PetscPartitionerView_Shell;
793   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
794   part->ops->destroy        = PetscPartitionerDestroy_Shell;
795   part->ops->partition      = PetscPartitionerPartition_Shell;
796   PetscFunctionReturn(0);
797 }
798 
799 /*MC
800   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
801 
802   Level: intermediate
803 
804 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
805 M*/
806 
807 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
808 {
809   PetscPartitioner_Shell *p;
810   PetscErrorCode          ierr;
811 
812   PetscFunctionBegin;
813   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
814   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
815   part->data = p;
816 
817   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
818   p->random = PETSC_FALSE;
819   PetscFunctionReturn(0);
820 }
821 
822 /*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
824 
825   Collective on Part
826 
827   Input Parameters:
828 + part     - The PetscPartitioner
829 . size - The number of partitions
830 . sizes    - array of size size (or NULL) providing the number of points in each partition
831 - points   - array of size sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)
832 
833   Level: developer
834 
835   Notes:
836 
837     It is safe to free the sizes and points arrays after use in this routine.
838 
839 .seealso DMPlexDistribute(), PetscPartitionerCreate()
840 @*/
841 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
842 {
843   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
844   PetscInt                proc, numPoints;
845   PetscErrorCode          ierr;
846 
847   PetscFunctionBegin;
848   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
849   if (sizes) {PetscValidPointer(sizes, 3);}
850   if (sizes) {PetscValidPointer(points, 4);}
851   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
852   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
853   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
854   ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
855   if (sizes) {
856     for (proc = 0; proc < size; ++proc) {
857       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
858     }
859   }
860   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
861   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
862   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
863   PetscFunctionReturn(0);
864 }
865 
866 /*@
867   PetscPartitionerShellSetRandom - Set the flag to use a random partition
868 
869   Collective on Part
870 
871   Input Parameters:
872 + part   - The PetscPartitioner
873 - random - The flag to use a random partition
874 
875   Level: intermediate
876 
877 .seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
878 @*/
879 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
880 {
881   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
882 
883   PetscFunctionBegin;
884   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
885   p->random = random;
886   PetscFunctionReturn(0);
887 }
888 
889 /*@
890   PetscPartitionerShellGetRandom - get the flag to use a random partition
891 
892   Collective on Part
893 
894   Input Parameter:
895 . part   - The PetscPartitioner
896 
  Output Parameter:
898 . random - The flag to use a random partition
899 
900   Level: intermediate
901 
902 .seealso PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
903 @*/
904 PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
905 {
906   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
907 
908   PetscFunctionBegin;
909   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
910   PetscValidPointer(random, 2);
911   *random = p->random;
912   PetscFunctionReturn(0);
913 }
914 
915 static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
916 {
917   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
918   PetscErrorCode          ierr;
919 
920   PetscFunctionBegin;
921   ierr = PetscFree(p);CHKERRQ(ierr);
922   PetscFunctionReturn(0);
923 }
924 
925 static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
926 {
927   PetscViewerFormat format;
928   PetscErrorCode    ierr;
929 
930   PetscFunctionBegin;
931   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
932   ierr = PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");CHKERRQ(ierr);
933   PetscFunctionReturn(0);
934 }
935 
936 static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
937 {
938   PetscBool      iascii;
939   PetscErrorCode ierr;
940 
941   PetscFunctionBegin;
942   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
943   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
944   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
945   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
946   PetscFunctionReturn(0);
947 }
948 
/* Partition by splitting the global vertex numbering into nparts nearly equal
   contiguous chunks; the CSR graph (start/adjacency) is ignored.  Local
   vertices keep their local order, so the returned IS is always a stride. */
static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)dm);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial: partition np gets floor(n/nparts) vertices, plus one extra for
       the first (numVertices % nparts) partitions */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  }
  else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Parallel: determine the global index range [myFirst, myLast] of the
       vertices owned by this rank via an allgather of local counts */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    /* Prefix sum; starting at np = 2 is correct because offsets[1] += offsets[0]
       would add zero */
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      /* Global layout: the first rem partitions are "large" (pBig vertices),
         the remaining ones are "small" (pSmall vertices) */
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      PetscInt rem = nvGlobal % nparts;
      PetscInt pSmall = nvGlobal/nparts;
      PetscInt pBig = nvGlobal/nparts + 1;


      if (rem) {
        /* Locate the partitions containing my first and last global vertices:
           first test against the block of large partitions, then fall through
           into the small ones */
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        }
        else {
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        }
        else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      }
      else {
        /* Even division: every partition holds exactly nvGlobal/nparts vertices */
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      /* For each partition overlapping my range, record how many of my local
         vertices fall inside it */
      for (np = firstPart; np <= lastPart; np++) {
        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1022 
1023 static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1024 {
1025   PetscFunctionBegin;
1026   part->ops->view      = PetscPartitionerView_Simple;
1027   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1028   part->ops->partition = PetscPartitionerPartition_Simple;
1029   PetscFunctionReturn(0);
1030 }
1031 
1032 /*MC
1033   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1034 
1035   Level: intermediate
1036 
1037 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1038 M*/
1039 
1040 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1041 {
1042   PetscPartitioner_Simple *p;
1043   PetscErrorCode           ierr;
1044 
1045   PetscFunctionBegin;
1046   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1047   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1048   part->data = p;
1049 
1050   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1051   PetscFunctionReturn(0);
1052 }
1053 
1054 static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1055 {
1056   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1057   PetscErrorCode          ierr;
1058 
1059   PetscFunctionBegin;
1060   ierr = PetscFree(p);CHKERRQ(ierr);
1061   PetscFunctionReturn(0);
1062 }
1063 
1064 static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
1065 {
1066   PetscViewerFormat format;
1067   PetscErrorCode    ierr;
1068 
1069   PetscFunctionBegin;
1070   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1071   ierr = PetscViewerASCIIPrintf(viewer, "Gather Graph Partitioner:\n");CHKERRQ(ierr);
1072   PetscFunctionReturn(0);
1073 }
1074 
1075 static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1076 {
1077   PetscBool      iascii;
1078   PetscErrorCode ierr;
1079 
1080   PetscFunctionBegin;
1081   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1082   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1083   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1084   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1085   PetscFunctionReturn(0);
1086 }
1087 
1088 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1089 {
1090   PetscInt       np;
1091   PetscErrorCode ierr;
1092 
1093   PetscFunctionBegin;
1094   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1095   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1096   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1097   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1098   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1099   PetscFunctionReturn(0);
1100 }
1101 
1102 static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1103 {
1104   PetscFunctionBegin;
1105   part->ops->view      = PetscPartitionerView_Gather;
1106   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1107   part->ops->partition = PetscPartitionerPartition_Gather;
1108   PetscFunctionReturn(0);
1109 }
1110 
1111 /*MC
1112   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1113 
1114   Level: intermediate
1115 
1116 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1117 M*/
1118 
1119 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1120 {
1121   PetscPartitioner_Gather *p;
1122   PetscErrorCode           ierr;
1123 
1124   PetscFunctionBegin;
1125   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1126   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1127   part->data = p;
1128 
1129   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1130   PetscFunctionReturn(0);
1131 }
1132 
1133 
1134 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1135 {
1136   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1137   PetscErrorCode          ierr;
1138 
1139   PetscFunctionBegin;
1140   ierr = PetscFree(p);CHKERRQ(ierr);
1141   PetscFunctionReturn(0);
1142 }
1143 
1144 static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1145 {
1146   PetscViewerFormat format;
1147   PetscErrorCode    ierr;
1148 
1149   PetscFunctionBegin;
1150   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1151   ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
1152   PetscFunctionReturn(0);
1153 }
1154 
1155 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1156 {
1157   PetscBool      iascii;
1158   PetscErrorCode ierr;
1159 
1160   PetscFunctionBegin;
1161   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1162   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1163   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1164   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1165   PetscFunctionReturn(0);
1166 }
1167 
1168 #if defined(PETSC_HAVE_CHACO)
1169 #if defined(PETSC_HAVE_UNISTD_H)
1170 #include <unistd.h>
1171 #endif
1172 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1173 #include <chaco.h>
1174 #else
1175 /* Older versions of Chaco do not have an include file */
1176 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1177                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1178                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1179                        int mesh_dims[3], double *goal, int global_method, int local_method,
1180                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1181 #endif
1182 extern int FREE_GRAPH;
1183 #endif
1184 
/* Partition the local CSR graph with the (serial) Chaco library.

   The per-vertex assignment produced by Chaco's interface() routine is
   converted to a PetscSection (vertex count per partition) plus an IS listing
   the vertices grouped by partition.  Chaco prints to stdout, so stdout is
   temporarily redirected into a pipe and replayed only on error.
   NOTE(review): vertex counts are narrowed to 'int' for Chaco — assumes the
   local graph fits in 32 bits. */
static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int           *assignment;              /* Output partition */
#else
  short int     *assignment;              /* Output partition */
#endif
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  /* Nothing local to partition: return an empty section and IS */
  if (!numVertices) {
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco wants 1-based vertex numbers in the adjacency list; shifted back below */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  /* architecture == 1: a 1-D processor mesh with nparts processors */
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  /* NOTE(review): the return code of interface() is only examined below when
     PETSC_HAVE_UNISTD_H is defined; on other platforms a Chaco failure is
     silently ignored — TODO confirm intended behavior */
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    /* Restore stdout and capture whatever Chaco printed into msgLog */
    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    /* interface() returned nonzero: surface Chaco's log as the error message */
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* points lists the vertices grouped by partition (partition 0 first, ...) */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift applied to the caller's adjacency array above */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1292 
1293 static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1294 {
1295   PetscFunctionBegin;
1296   part->ops->view      = PetscPartitionerView_Chaco;
1297   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1298   part->ops->partition = PetscPartitionerPartition_Chaco;
1299   PetscFunctionReturn(0);
1300 }
1301 
1302 /*MC
1303   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1304 
1305   Level: intermediate
1306 
1307 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1308 M*/
1309 
1310 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1311 {
1312   PetscPartitioner_Chaco *p;
1313   PetscErrorCode          ierr;
1314 
1315   PetscFunctionBegin;
1316   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1317   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1318   part->data = p;
1319 
1320   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1321   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1322   PetscFunctionReturn(0);
1323 }
1324 
1325 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1326 {
1327   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1328   PetscErrorCode             ierr;
1329 
1330   PetscFunctionBegin;
1331   ierr = PetscFree(p);CHKERRQ(ierr);
1332   PetscFunctionReturn(0);
1333 }
1334 
1335 static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1336 {
1337   PetscViewerFormat format;
1338   PetscErrorCode    ierr;
1339 
1340   PetscFunctionBegin;
1341   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1342   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
1343   PetscFunctionReturn(0);
1344 }
1345 
1346 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1347 {
1348   PetscBool      iascii;
1349   PetscErrorCode ierr;
1350 
1351   PetscFunctionBegin;
1352   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1353   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1354   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1355   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1356   PetscFunctionReturn(0);
1357 }
1358 
1359 #if defined(PETSC_HAVE_PARMETIS)
1360 #include <parmetis.h>
1361 #endif
1362 
1363 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1364 {
1365 #if defined(PETSC_HAVE_PARMETIS)
1366   MPI_Comm       comm;
1367   PetscSection   section;
1368   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1369   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1370   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1371   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1372   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1373   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1374   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1375   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1376   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1377   PetscReal     *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1378   PetscReal     *ubvec;                    /* The balance intolerance for vertex weights */
1379   PetscInt       options[5];               /* Options */
1380   /* Outputs */
1381   PetscInt       edgeCut;                  /* The number of edges cut by the partition */
1382   PetscInt      *assignment, *points;
1383   PetscMPIInt    rank, p, v, i;
1384   PetscErrorCode ierr;
1385 
1386   PetscFunctionBegin;
1387   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1388   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1389   options[0] = 0; /* Use all defaults */
1390   /* Calculate vertex distribution */
1391   ierr = PetscMalloc5(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
1392   vtxdist[0] = 0;
1393   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1394   for (p = 2; p <= nparts; ++p) {
1395     vtxdist[p] += vtxdist[p-1];
1396   }
1397   /* Calculate weights */
1398   for (p = 0; p < nparts; ++p) {
1399     tpwgts[p] = 1.0/nparts;
1400   }
1401   ubvec[0] = 1.05;
1402   /* Weight cells by dofs on cell by default */
1403   ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1404   if (section) {
1405     PetscInt cStart, cEnd, dof;
1406 
1407     ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
1408     for (v = cStart; v < cEnd; ++v) {
1409       ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1410       /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1411       /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1412       if (v-cStart < numVertices) vwgt[v-cStart] = PetscMax(dof, 1);
1413     }
1414   } else {
1415     for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1416   }
1417 
1418   if (nparts == 1) {
1419     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1420   } else {
1421     if (vtxdist[1] == vtxdist[nparts]) {
1422       if (!rank) {
1423         PetscStackPush("METIS_PartGraphKway");
1424         ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
1425         PetscStackPop;
1426         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1427       }
1428     } else {
1429       PetscStackPush("ParMETIS_V3_PartKway");
1430       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1431       PetscStackPop;
1432       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
1433     }
1434   }
1435   /* Convert to PetscSection+IS */
1436   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1437   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1438   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1439   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1440   for (p = 0, i = 0; p < nparts; ++p) {
1441     for (v = 0; v < nvtxs; ++v) {
1442       if (assignment[v] == p) points[i++] = v;
1443     }
1444   }
1445   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1446   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1447   ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
1448   PetscFunctionReturn(0);
1449 #else
1450   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1451 #endif
1452 }
1453 
1454 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1455 {
1456   PetscFunctionBegin;
1457   part->ops->view      = PetscPartitionerView_ParMetis;
1458   part->ops->destroy   = PetscPartitionerDestroy_ParMetis;
1459   part->ops->partition = PetscPartitionerPartition_ParMetis;
1460   PetscFunctionReturn(0);
1461 }
1462 
1463 /*MC
1464   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1465 
1466   Level: intermediate
1467 
1468 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1469 M*/
1470 
1471 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1472 {
1473   PetscPartitioner_ParMetis *p;
1474   PetscErrorCode          ierr;
1475 
1476   PetscFunctionBegin;
1477   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1478   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1479   part->data = p;
1480 
1481   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1482   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1483   PetscFunctionReturn(0);
1484 }
1485 
1486 /*@
1487   DMPlexGetPartitioner - Get the mesh partitioner
1488 
1489   Not collective
1490 
1491   Input Parameter:
1492 . dm - The DM
1493 
1494   Output Parameter:
1495 . part - The PetscPartitioner
1496 
1497   Level: developer
1498 
1499   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1500 
1501 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1502 @*/
1503 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1504 {
1505   DM_Plex *mesh = (DM_Plex *) dm->data;
1506 
1507   PetscFunctionBegin;
1508   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1509   PetscValidPointer(part, 2);
1510   *part = mesh->partitioner;
1511   PetscFunctionReturn(0);
1512 }
1513 
1514 /*@
1515   DMPlexSetPartitioner - Set the mesh partitioner
1516 
1517   logically collective on dm and part
1518 
1519   Input Parameters:
1520 + dm - The DM
1521 - part - The partitioner
1522 
1523   Level: developer
1524 
1525   Note: Any existing PetscPartitioner will be destroyed.
1526 
1527 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1528 @*/
1529 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1530 {
1531   DM_Plex       *mesh = (DM_Plex *) dm->data;
1532   PetscErrorCode ierr;
1533 
1534   PetscFunctionBegin;
1535   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1536   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1537   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1538   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1539   mesh->partitioner = part;
1540   PetscFunctionReturn(0);
1541 }
1542 
/* Propagate a partition label value through the DMPlex reference tree.

   When 'up' is set, labels the closure of the point's tree parent (recursing
   upward only); when 'down' is set, labels the point's tree children
   (recursing downward only).  The one-directional recursion flags prevent
   revisiting points already handled in the other direction. */
static PetscErrorCode DMPlexPartitionLabelClosure_Tree(DM dm, DMLabel label, PetscInt rank, PetscInt point, PetscBool up, PetscBool down)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (up) {
    PetscInt parent;

    ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
    /* A point is its own parent when it has no tree parent */
    if (parent != point) {
      PetscInt closureSize, *closure = NULL, i;

      /* Label every point in the parent's closure with this rank, and keep
         walking up the tree from each of them */
      ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
      for (i = 0; i < closureSize; i++) {
        PetscInt cpoint = closure[2*i]; /* closure stores (point, orientation) pairs */

        ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
      }
      ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
    }
  }
  if (down) {
    PetscInt numChildren;
    const PetscInt *children;

    ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
    if (numChildren) {
      PetscInt i;

      /* Label each child and keep walking down the tree from it */
      for (i = 0; i < numChildren; i++) {
        PetscInt cpoint = children[i];

        ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}
1583 
1584 /*@
1585   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
1586 
1587   Input Parameters:
1588 + dm     - The DM
1589 - label  - DMLabel assinging ranks to remote roots
1590 
1591   Level: developer
1592 
1593 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1594 @*/
1595 PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
1596 {
1597   IS              rankIS,   pointIS;
1598   const PetscInt *ranks,   *points;
1599   PetscInt        numRanks, numPoints, r, p, c, closureSize;
1600   PetscInt       *closure = NULL;
1601   PetscErrorCode  ierr;
1602 
1603   PetscFunctionBegin;
1604   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1605   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1606   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1607   for (r = 0; r < numRanks; ++r) {
1608     const PetscInt rank = ranks[r];
1609 
1610     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1611     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1612     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1613     for (p = 0; p < numPoints; ++p) {
1614       ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
1615       for (c = 0; c < closureSize*2; c += 2) {
1616         ierr = DMLabelSetValue(label, closure[c], rank);CHKERRQ(ierr);
1617         ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,closure[c],PETSC_TRUE,PETSC_TRUE);CHKERRQ(ierr);
1618       }
1619     }
1620     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1621     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1622   }
1623   if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);}
1624   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1625   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1626   PetscFunctionReturn(0);
1627 }
1628 
1629 /*@
1630   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
1631 
1632   Input Parameters:
1633 + dm     - The DM
1634 - label  - DMLabel assinging ranks to remote roots
1635 
1636   Level: developer
1637 
1638 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1639 @*/
1640 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
1641 {
1642   IS              rankIS,   pointIS;
1643   const PetscInt *ranks,   *points;
1644   PetscInt        numRanks, numPoints, r, p, a, adjSize;
1645   PetscInt       *adj = NULL;
1646   PetscErrorCode  ierr;
1647 
1648   PetscFunctionBegin;
1649   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1650   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1651   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1652   for (r = 0; r < numRanks; ++r) {
1653     const PetscInt rank = ranks[r];
1654 
1655     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1656     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1657     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1658     for (p = 0; p < numPoints; ++p) {
1659       adjSize = PETSC_DETERMINE;
1660       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
1661       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
1662     }
1663     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1664     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1665   }
1666   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1667   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1668   ierr = PetscFree(adj);CHKERRQ(ierr);
1669   PetscFunctionReturn(0);
1670 }
1671 
1672 /*@
1673   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
1674 
1675   Input Parameters:
1676 + dm     - The DM
1677 - label  - DMLabel assinging ranks to remote roots
1678 
1679   Level: developer
1680 
1681   Note: This is required when generating multi-level overlaps to capture
1682   overlap points from non-neighbouring partitions.
1683 
1684 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1685 @*/
1686 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
1687 {
1688   MPI_Comm        comm;
1689   PetscMPIInt     rank;
1690   PetscSF         sfPoint;
1691   DMLabel         lblRoots, lblLeaves;
1692   IS              rankIS, pointIS;
1693   const PetscInt *ranks;
1694   PetscInt        numRanks, r;
1695   PetscErrorCode  ierr;
1696 
1697   PetscFunctionBegin;
1698   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1699   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1700   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1701   /* Pull point contributions from remote leaves into local roots */
1702   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
1703   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
1704   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1705   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1706   for (r = 0; r < numRanks; ++r) {
1707     const PetscInt remoteRank = ranks[r];
1708     if (remoteRank == rank) continue;
1709     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
1710     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1711     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1712   }
1713   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1714   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1715   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
1716   /* Push point contributions from roots into remote leaves */
1717   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
1718   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
1719   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1720   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1721   for (r = 0; r < numRanks; ++r) {
1722     const PetscInt remoteRank = ranks[r];
1723     if (remoteRank == rank) continue;
1724     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
1725     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1726     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1727   }
1728   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1729   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1730   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
1731   PetscFunctionReturn(0);
1732 }
1733 
/*@
  DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label

  Input Parameters:
+ dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots

  Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
  resulting leafLabel is a receiver mapping of remote roots to their parent rank.

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           p, n, numNeighbors, ssize, l, nleaves;
  PetscSF            sfPoint;
  PetscSFNode       *rootPoints, *leafPoints;
  PetscSection       rootSection, leafSection;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners */
  /* rootSection has one "dof" per (point, rank) pair to send, bucketed by target rank */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &ssize);CHKERRQ(ierr);
  ierr = PetscMalloc1(ssize, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* If the point is a leaf in the point SF, send its actual remote owner;
         otherwise the point is locally owned and we send (point, rank) */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Filter remote contributions (ovLeafPoints) into the overlapSF */
  ierr = PetscSectionGetStorageSize(leafSection, &ssize);CHKERRQ(ierr);
  for (p = 0; p < ssize; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1821 
1822 /*@
1823   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
1824 
1825   Input Parameters:
1826 + dm    - The DM
1827 . label - DMLabel assinging ranks to remote roots
1828 
1829   Output Parameter:
1830 - sf    - The star forest communication context encapsulating the defined mapping
1831 
1832   Note: The incoming label is a receiver mapping of remote points to their parent rank.
1833 
1834   Level: developer
1835 
1836 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
1837 @*/
1838 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
1839 {
1840   PetscMPIInt     rank, size;
1841   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
1842   PetscSFNode    *remotePoints;
1843   IS              remoteRootIS;
1844   const PetscInt *remoteRoots;
1845   PetscErrorCode ierr;
1846 
1847   PetscFunctionBegin;
1848   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1849   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
1850 
1851   for (numRemote = 0, n = 0; n < size; ++n) {
1852     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1853     numRemote += numPoints;
1854   }
1855   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
1856   /* Put owned points first */
1857   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
1858   if (numPoints > 0) {
1859     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
1860     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1861     for (p = 0; p < numPoints; p++) {
1862       remotePoints[idx].index = remoteRoots[p];
1863       remotePoints[idx].rank = rank;
1864       idx++;
1865     }
1866     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1867     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1868   }
1869   /* Now add remote points */
1870   for (n = 0; n < size; ++n) {
1871     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1872     if (numPoints <= 0 || n == rank) continue;
1873     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
1874     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1875     for (p = 0; p < numPoints; p++) {
1876       remotePoints[idx].index = remoteRoots[p];
1877       remotePoints[idx].rank = n;
1878       idx++;
1879     }
1880     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1881     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1882   }
1883   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
1884   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1885   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1886   PetscFunctionReturn(0);
1887 }
1888