xref: /petsc/src/dm/impls/plex/plexpartition.c (revision b3ce585b28d50f161f07cbf650311edea5c1cb6a)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
3 PetscClassId PETSCPARTITIONER_CLASSID = 0;
4 
5 PetscFunctionList PetscPartitionerList              = NULL;
6 PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
7 
8 PetscBool ChacoPartitionercite = PETSC_FALSE;
9 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
10                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
11                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
12                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
13                                "  isbn      = {0-89791-816-9},\n"
14                                "  pages     = {28},\n"
15                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
16                                "  publisher = {ACM Press},\n"
17                                "  address   = {New York},\n"
18                                "  year      = {1995}\n}\n";
19 
20 PetscBool ParMetisPartitionercite = PETSC_FALSE;
21 const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
22                                "  author  = {George Karypis and Vipin Kumar},\n"
23                                "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
24                                "  journal = {Journal of Parallel and Distributed Computing},\n"
25                                "  volume  = {48},\n"
26                                "  pages   = {71--85},\n"
27                                "  year    = {1998}\n}\n";
28 
29 /*@C
30   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
31 
32   Input Parameters:
+ dm      - The mesh DM
34 - height  - Height of the strata from which to construct the graph
35 
  Output Parameters:
37 + numVertices     - Number of vertices in the graph
38 . offsets         - Point offsets in the graph
39 . adjacency       - Point connectivity in the graph
40 - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.
41 
  Notes:
  The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
  DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
  representation on the mesh.
45 
46   Level: developer
47 
48 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
49 @*/
50 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
51 {
52   PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
53   PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
54   IS             cellNumbering;
55   const PetscInt *cellNum;
56   PetscBool      useCone, useClosure;
57   PetscSection   section;
58   PetscSegBuffer adjBuffer;
59   PetscSF        sfPoint;
60   PetscMPIInt    rank;
61   PetscErrorCode ierr;
62 
63   PetscFunctionBegin;
64   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
65   ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
66   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
67   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
68   /* Build adjacency graph via a section/segbuffer */
69   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
70   ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
71   ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
72   /* Always use FVM adjacency to create partitioner graph */
73   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
74   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
75   ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
76   ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
77   ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);CHKERRQ(ierr);
78   if (globalNumbering) {
79     ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
80     *globalNumbering = cellNumbering;
81   }
82   ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
83   for (*numVertices = 0, p = pStart; p < pEnd; p++) {
84     /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
85     if (nroots > 0) {if (cellNum[p] < 0) continue;}
86     adjSize = PETSC_DETERMINE;
87     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
88     for (a = 0; a < adjSize; ++a) {
89       const PetscInt point = adj[a];
90       if (point != p && pStart <= point && point < pEnd) {
91         PetscInt *PETSC_RESTRICT pBuf;
92         ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
93         ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
94         *pBuf = point;
95       }
96     }
97     (*numVertices)++;
98   }
99   ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
100   ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
101   /* Derive CSR graph from section/segbuffer */
102   ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
103   ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
104   ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
105   for (idx = 0, p = pStart; p < pEnd; p++) {
106     if (nroots > 0) {if (cellNum[p] < 0) continue;}
107     ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
108   }
109   vOffsets[*numVertices] = size;
110   if (offsets) *offsets = vOffsets;
111   ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
112   {
113     ISLocalToGlobalMapping ltogCells;
114     PetscInt n, size, *cells_arr;
115     /* In parallel, apply a global cell numbering to the graph */
116     ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
117     ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
118     ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
119     ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
120     /* Convert to positive global cell numbers */
121     for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
122     ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
123     ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);CHKERRQ(ierr);
124     ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
125     ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
126   }
127   if (adjacency) *adjacency = graph;
128   /* Clean up */
129   ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
130   ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
131   ierr = PetscFree(adj);CHKERRQ(ierr);
132   PetscFunctionReturn(0);
133 }
134 
135 /*@C
136   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
137 
138   Collective
139 
140   Input Arguments:
141 + dm - The DMPlex
142 - cellHeight - The height of mesh points to treat as cells (default should be 0)
143 
144   Output Arguments:
145 + numVertices - The number of local vertices in the graph, or cells in the mesh.
146 . offsets     - The offset to the adjacency list for each cell
147 - adjacency   - The adjacency list for all cells
148 
149   Note: This is suitable for input to a mesh partitioner like ParMetis.
150 
151   Level: advanced
152 
153 .seealso: DMPlexCreate()
154 @*/
155 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
156 {
157   const PetscInt maxFaceCases = 30;
158   PetscInt       numFaceCases = 0;
159   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
160   PetscInt      *off, *adj;
161   PetscInt      *neighborCells = NULL;
162   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
163   PetscErrorCode ierr;
164 
165   PetscFunctionBegin;
166   /* For parallel partitioning, I think you have to communicate supports */
167   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
168   cellDim = dim - cellHeight;
169   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
170   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
171   if (cEnd - cStart == 0) {
172     if (numVertices) *numVertices = 0;
173     if (offsets)   *offsets   = NULL;
174     if (adjacency) *adjacency = NULL;
175     PetscFunctionReturn(0);
176   }
177   numCells  = cEnd - cStart;
178   faceDepth = depth - cellHeight;
179   if (dim == depth) {
180     PetscInt f, fStart, fEnd;
181 
182     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
183     /* Count neighboring cells */
184     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
185     for (f = fStart; f < fEnd; ++f) {
186       const PetscInt *support;
187       PetscInt        supportSize;
188       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
189       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
190       if (supportSize == 2) {
191         PetscInt numChildren;
192 
193         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
194         if (!numChildren) {
195           ++off[support[0]-cStart+1];
196           ++off[support[1]-cStart+1];
197         }
198       }
199     }
200     /* Prefix sum */
201     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
202     if (adjacency) {
203       PetscInt *tmp;
204 
205       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
206       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
207       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
208       /* Get neighboring cells */
209       for (f = fStart; f < fEnd; ++f) {
210         const PetscInt *support;
211         PetscInt        supportSize;
212         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
213         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
214         if (supportSize == 2) {
215           PetscInt numChildren;
216 
217           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
218           if (!numChildren) {
219             adj[tmp[support[0]-cStart]++] = support[1];
220             adj[tmp[support[1]-cStart]++] = support[0];
221           }
222         }
223       }
224 #if defined(PETSC_USE_DEBUG)
225       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
226 #endif
227       ierr = PetscFree(tmp);CHKERRQ(ierr);
228     }
229     if (numVertices) *numVertices = numCells;
230     if (offsets)   *offsets   = off;
231     if (adjacency) *adjacency = adj;
232     PetscFunctionReturn(0);
233   }
234   /* Setup face recognition */
235   if (faceDepth == 1) {
236     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
237 
238     for (c = cStart; c < cEnd; ++c) {
239       PetscInt corners;
240 
241       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
242       if (!cornersSeen[corners]) {
243         PetscInt nFV;
244 
245         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
246         cornersSeen[corners] = 1;
247 
248         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
249 
250         numFaceVertices[numFaceCases++] = nFV;
251       }
252     }
253   }
254   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
255   /* Count neighboring cells */
256   for (cell = cStart; cell < cEnd; ++cell) {
257     PetscInt numNeighbors = PETSC_DETERMINE, n;
258 
259     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
260     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
261     for (n = 0; n < numNeighbors; ++n) {
262       PetscInt        cellPair[2];
263       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
264       PetscInt        meetSize = 0;
265       const PetscInt *meet    = NULL;
266 
267       cellPair[0] = cell; cellPair[1] = neighborCells[n];
268       if (cellPair[0] == cellPair[1]) continue;
269       if (!found) {
270         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
271         if (meetSize) {
272           PetscInt f;
273 
274           for (f = 0; f < numFaceCases; ++f) {
275             if (numFaceVertices[f] == meetSize) {
276               found = PETSC_TRUE;
277               break;
278             }
279           }
280         }
281         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
282       }
283       if (found) ++off[cell-cStart+1];
284     }
285   }
286   /* Prefix sum */
287   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
288 
289   if (adjacency) {
290     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
291     /* Get neighboring cells */
292     for (cell = cStart; cell < cEnd; ++cell) {
293       PetscInt numNeighbors = PETSC_DETERMINE, n;
294       PetscInt cellOffset   = 0;
295 
296       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
297       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
298       for (n = 0; n < numNeighbors; ++n) {
299         PetscInt        cellPair[2];
300         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
301         PetscInt        meetSize = 0;
302         const PetscInt *meet    = NULL;
303 
304         cellPair[0] = cell; cellPair[1] = neighborCells[n];
305         if (cellPair[0] == cellPair[1]) continue;
306         if (!found) {
307           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
308           if (meetSize) {
309             PetscInt f;
310 
311             for (f = 0; f < numFaceCases; ++f) {
312               if (numFaceVertices[f] == meetSize) {
313                 found = PETSC_TRUE;
314                 break;
315               }
316             }
317           }
318           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
319         }
320         if (found) {
321           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
322           ++cellOffset;
323         }
324       }
325     }
326   }
327   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
328   if (numVertices) *numVertices = numCells;
329   if (offsets)   *offsets   = off;
330   if (adjacency) *adjacency = adj;
331   PetscFunctionReturn(0);
332 }
333 
334 /*@C
335   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
336 
337   Not Collective
338 
339   Input Parameters:
340 + name        - The name of a new user-defined creation routine
341 - create_func - The creation routine itself
342 
343   Notes:
344   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
345 
346   Sample usage:
347 .vb
348     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
349 .ve
350 
351   Then, your PetscPartitioner type can be chosen with the procedural interface via
352 .vb
353     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
354     PetscPartitionerSetType(PetscPartitioner, "my_part");
355 .ve
356    or at runtime via the option
357 .vb
358     -petscpartitioner_type my_part
359 .ve
360 
361   Level: advanced
362 
363 .keywords: PetscPartitioner, register
364 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
365 
366 @*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Record the (name, constructor) pair in the global registry consulted by PetscPartitionerSetType() */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
375 
376 /*@C
377   PetscPartitionerSetType - Builds a particular PetscPartitioner
378 
379   Collective on PetscPartitioner
380 
381   Input Parameters:
382 + part - The PetscPartitioner object
383 - name - The kind of partitioner
384 
385   Options Database Key:
386 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
387 
388   Level: intermediate
389 
390 .keywords: PetscPartitioner, set, type
391 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
392 @*/
393 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
394 {
395   PetscErrorCode (*r)(PetscPartitioner);
396   PetscBool      match;
397   PetscErrorCode ierr;
398 
399   PetscFunctionBegin;
400   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
401   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
402   if (match) PetscFunctionReturn(0);
403 
404   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
405   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
406   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
407 
408   if (part->ops->destroy) {
409     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
410     part->ops->destroy = NULL;
411   }
412   ierr = (*r)(part);CHKERRQ(ierr);
413   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
414   PetscFunctionReturn(0);
415 }
416 
417 /*@C
418   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
419 
420   Not Collective
421 
422   Input Parameter:
423 . part - The PetscPartitioner
424 
425   Output Parameter:
426 . name - The PetscPartitioner type name
427 
428   Level: intermediate
429 
430 .keywords: PetscPartitioner, get, type, name
431 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
432 @*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  /* Make sure built-in implementations are registered before reporting the type name */
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  /* The string is owned by the object; the caller must not free it */
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
444 
445 /*@C
446   PetscPartitionerView - Views a PetscPartitioner
447 
448   Collective on PetscPartitioner
449 
  Input Parameters:
451 + part - the PetscPartitioner object to view
452 - v    - the viewer
453 
454   Level: developer
455 
456 .seealso: PetscPartitionerDestroy()
457 @*/
458 PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
459 {
460   PetscErrorCode ierr;
461 
462   PetscFunctionBegin;
463   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
464   if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
465   if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
466   PetscFunctionReturn(0);
467 }
468 
469 static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType)
470 {
471   PetscFunctionBegin;
472   if (!currentType) {
473 #if defined(PETSC_HAVE_CHACO)
474     *defaultType = PETSCPARTITIONERCHACO;
475 #elif defined(PETSC_HAVE_PARMETIS)
476     *defaultType = PETSCPARTITIONERPARMETIS;
477 #else
478     *defaultType = PETSCPARTITIONERSIMPLE;
479 #endif
480   } else {
481     *defaultType = currentType;
482   }
483   PetscFunctionReturn(0);
484 }
485 
486 PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
487 {
488   const char    *defaultType;
489   char           name[256];
490   PetscBool      flg;
491   PetscErrorCode ierr;
492 
493   PetscFunctionBegin;
494   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
495   ierr = PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);CHKERRQ(ierr);
496   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
497 
498   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
499   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);CHKERRQ(ierr);
500   if (flg) {
501     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
502   } else if (!((PetscObject) part)->type_name) {
503     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
504   }
505   ierr = PetscOptionsEnd();CHKERRQ(ierr);
506   PetscFunctionReturn(0);
507 }
508 
509 /*@
510   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
511 
512   Collective on PetscPartitioner
513 
514   Input Parameter:
515 . part - the PetscPartitioner object to set options for
516 
517   Level: developer
518 
519 .seealso: PetscPartitionerView()
520 @*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Resolve the type first so the type-specific setfromoptions below exists */
  ierr = PetscPartitionerSetTypeFromOptions_Internal(part);CHKERRQ(ierr);

  /* PetscOptionsObject used below is declared by the PetscObjectOptionsBegin() macro */
  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  if (part->ops->setfromoptions) {ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);}
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
537 
538 /*@C
539   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
540 
541   Collective on PetscPartitioner
542 
543   Input Parameter:
544 . part - the PetscPartitioner object to setup
545 
546   Level: developer
547 
548 .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
549 @*/
550 PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
551 {
552   PetscErrorCode ierr;
553 
554   PetscFunctionBegin;
555   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
556   if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
557   PetscFunctionReturn(0);
558 }
559 
560 /*@
561   PetscPartitionerDestroy - Destroys a PetscPartitioner object
562 
563   Collective on PetscPartitioner
564 
565   Input Parameter:
566 . part - the PetscPartitioner object to destroy
567 
568   Level: developer
569 
570 .seealso: PetscPartitionerView()
571 @*/
572 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
573 {
574   PetscErrorCode ierr;
575 
576   PetscFunctionBegin;
577   if (!*part) PetscFunctionReturn(0);
578   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
579 
580   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
581   ((PetscObject) (*part))->refct = 0;
582 
583   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
584   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
585   PetscFunctionReturn(0);
586 }
587 
588 /*@
589   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
590 
591   Collective on MPI_Comm
592 
593   Input Parameter:
594 . comm - The communicator for the PetscPartitioner object
595 
596   Output Parameter:
597 . part - The PetscPartitioner object
598 
599   Level: beginner
600 
601 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
602 @*/
603 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
604 {
605   PetscPartitioner p;
606   const char       *partitionerType = NULL;
607   PetscErrorCode   ierr;
608 
609   PetscFunctionBegin;
610   PetscValidPointer(part, 2);
611   *part = NULL;
612   ierr = DMInitializePackage();CHKERRQ(ierr);
613 
614   ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
615   ierr = PetscPartitionerGetDefaultType(NULL,&partitionerType);CHKERRQ(ierr);
616   ierr = PetscPartitionerSetType(p,partitionerType);CHKERRQ(ierr);
617 
618   *part = p;
619   PetscFunctionReturn(0);
620 }
621 
622 /*@
623   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
624 
625   Collective on DM
626 
627   Input Parameters:
628 + part    - The PetscPartitioner
629 - dm      - The mesh DM
630 
631   Output Parameters:
632 + partSection     - The PetscSection giving the division of points by partition
633 - partition       - The list of points by partition
634 
635   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
636 
637   Level: developer
638 
639 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
640 @*/
641 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
642 {
643   PetscMPIInt    size;
644   PetscErrorCode ierr;
645 
646   PetscFunctionBegin;
647   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
648   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
649   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
650   PetscValidPointer(partition, 5);
651   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
652   if (size == 1) {
653     PetscInt *points;
654     PetscInt  cStart, cEnd, c;
655 
656     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
657     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
658     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
659     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
660     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
661     for (c = cStart; c < cEnd; ++c) points[c] = c;
662     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
663     PetscFunctionReturn(0);
664   }
665   if (part->height == 0) {
666     PetscInt numVertices;
667     PetscInt *start     = NULL;
668     PetscInt *adjacency = NULL;
669     IS       globalNumbering;
670 
671     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
672     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
673     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
674     ierr = PetscFree(start);CHKERRQ(ierr);
675     ierr = PetscFree(adjacency);CHKERRQ(ierr);
676     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
677       const PetscInt *globalNum;
678       const PetscInt *partIdx;
679       PetscInt *map, cStart, cEnd;
680       PetscInt *adjusted, i, localSize, offset;
681       IS    newPartition;
682 
683       ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
684       ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
685       ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
686       ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
687       ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
688       ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
689       for (i = cStart, offset = 0; i < cEnd; i++) {
690         if (globalNum[i - cStart] >= 0) map[offset++] = i;
691       }
692       for (i = 0; i < localSize; i++) {
693         adjusted[i] = map[partIdx[i]];
694       }
695       ierr = PetscFree(map);CHKERRQ(ierr);
696       ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
697       ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
698       ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
699       ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
700       ierr = ISDestroy(partition);CHKERRQ(ierr);
701       *partition = newPartition;
702     }
703   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
704   PetscFunctionReturn(0);
705 
706 }
707 
708 static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
709 {
710   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
711   PetscErrorCode          ierr;
712 
713   PetscFunctionBegin;
714   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
715   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
716   ierr = PetscFree(p);CHKERRQ(ierr);
717   PetscFunctionReturn(0);
718 }
719 
720 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
721 {
722   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
723   PetscViewerFormat       format;
724   PetscErrorCode          ierr;
725 
726   PetscFunctionBegin;
727   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
728   ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
729   if (p->random) {
730     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
731     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
732     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
733   }
734   PetscFunctionReturn(0);
735 }
736 
737 static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
738 {
739   PetscBool      iascii;
740   PetscErrorCode ierr;
741 
742   PetscFunctionBegin;
743   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
744   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
745   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
746   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
747   PetscFunctionReturn(0);
748 }
749 
750 static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
751 {
752   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
753   PetscErrorCode          ierr;
754 
755   PetscFunctionBegin;
756   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitionerShell Options");CHKERRQ(ierr);
757   ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
758   ierr = PetscOptionsTail();CHKERRQ(ierr);
759   PetscFunctionReturn(0);
760 }
761 
762 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
763 {
764   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
765   PetscInt                np;
766   PetscErrorCode          ierr;
767 
768   PetscFunctionBegin;
769   if (p->random) {
770     PetscRandom r;
771     PetscInt   *sizes, *points, v, p;
772     PetscMPIInt rank;
773 
774     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
775     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
776     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr);
777     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
778     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
779     for (v = 0; v < numVertices; ++v) {points[v] = v;}
780     for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);}
781     for (v = numVertices-1; v > 0; --v) {
782       PetscReal val;
783       PetscInt  w, tmp;
784 
785       ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));CHKERRQ(ierr);
786       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
787       w    = PetscFloorReal(val);
788       tmp       = points[v];
789       points[v] = points[w];
790       points[w] = tmp;
791     }
792     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
793     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
794     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
795   }
796   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
797   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
798   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
799   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
800   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
801   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
802   *partition = p->partition;
803   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
804   PetscFunctionReturn(0);
805 }
806 
807 static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
808 {
809   PetscFunctionBegin;
810   part->ops->view           = PetscPartitionerView_Shell;
811   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
812   part->ops->destroy        = PetscPartitionerDestroy_Shell;
813   part->ops->partition      = PetscPartitionerPartition_Shell;
814   PetscFunctionReturn(0);
815 }
816 
817 /*MC
818   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
819 
820   Level: intermediate
821 
822 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
823 M*/
824 
825 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
826 {
827   PetscPartitioner_Shell *p;
828   PetscErrorCode          ierr;
829 
830   PetscFunctionBegin;
831   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
832   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
833   part->data = p;
834 
835   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
836   p->random = PETSC_FALSE;
837   PetscFunctionReturn(0);
838 }
839 
840 /*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
842 
843   Collective on Part
844 
845   Input Parameters:
846 + part     - The PetscPartitioner
847 . size - The number of partitions
848 . sizes    - array of size size (or NULL) providing the number of points in each partition
849 - points   - array of size sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)
850 
851   Level: developer
852 
853   Notes:
854 
855     It is safe to free the sizes and points arrays after use in this routine.
856 
857 .seealso DMPlexDistribute(), PetscPartitionerCreate()
858 @*/
859 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
860 {
861   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
862   PetscInt                proc, numPoints;
863   PetscErrorCode          ierr;
864 
865   PetscFunctionBegin;
866   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
867   if (sizes)  {PetscValidPointer(sizes, 3);}
868   if (points) {PetscValidPointer(points, 4);}
869   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
870   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
871   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
872   ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
873   if (sizes) {
874     for (proc = 0; proc < size; ++proc) {
875       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
876     }
877   }
878   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
879   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
880   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
881   PetscFunctionReturn(0);
882 }
883 
884 /*@
885   PetscPartitionerShellSetRandom - Set the flag to use a random partition
886 
887   Collective on Part
888 
889   Input Parameters:
890 + part   - The PetscPartitioner
891 - random - The flag to use a random partition
892 
893   Level: intermediate
894 
895 .seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
896 @*/
897 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
898 {
899   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
900 
901   PetscFunctionBegin;
902   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
903   p->random = random;
904   PetscFunctionReturn(0);
905 }
906 
907 /*@
908   PetscPartitionerShellGetRandom - get the flag to use a random partition
909 
910   Collective on Part
911 
912   Input Parameter:
913 . part   - The PetscPartitioner
914 
  Output Parameter:
916 . random - The flag to use a random partition
917 
918   Level: intermediate
919 
920 .seealso PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
921 @*/
/* Query the random-partition flag; simply reads local state, no communication occurs. */
PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(random, 2);
  /* Flag set by PetscPartitionerShellSetRandom() or -petscpartitioner_shell_random */
  *random = p->random;
  PetscFunctionReturn(0);
}
932 
933 static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
934 {
935   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
936   PetscErrorCode          ierr;
937 
938   PetscFunctionBegin;
939   ierr = PetscFree(p);CHKERRQ(ierr);
940   PetscFunctionReturn(0);
941 }
942 
/* Print a one-line description of the simple partitioner to an ASCII viewer. */
static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscViewerFormat format;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  /* The viewer format is retrieved but not currently used */
  ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
953 
954 static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
955 {
956   PetscBool      iascii;
957   PetscErrorCode ierr;
958 
959   PetscFunctionBegin;
960   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
961   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
962   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
963   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
964   PetscFunctionReturn(0);
965 }
966 
/* Partition vertices into contiguous, nearly equal-sized chunks in global vertex order,
   ignoring the adjacency graph entirely.

   Serial (size == 1): split the local vertex range directly into nparts chunks.
   Parallel: each rank owns the contiguous global slice [myFirst, myLast]; it computes
   which target partitions overlap that slice and records the overlap size in
   partSection. The output IS is always the identity on local vertices, since the
   grouped-by-partition order coincides with local order for contiguous chunks. */
static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)dm);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* First (numVertices % nparts) parts get one extra vertex */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  }
  else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Exclusive prefix sum of per-rank vertex counts -> global offsets */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      /* rem "large" parts of size pBig come first, then (nparts - rem) of size pSmall.
         NOTE(review): if nparts > nvGlobal then pSmall == 0 and the divisions below
         by pSmall (or by nvGlobal/nparts in the rem == 0 branch) divide by zero --
         callers appear to use nparts <= nvGlobal; verify */
      PetscInt rem = nvGlobal % nparts;
      PetscInt pSmall = nvGlobal/nparts;
      PetscInt pBig = nvGlobal/nparts + 1;


      if (rem) {
        /* Locate the parts containing my first and last global vertices */
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        }
        else {
          /* myFirst falls past the rem large parts; index into the small parts */
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        }
        else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      }
      else {
        /* All parts have equal size nvGlobal/nparts */
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      /* For each overlapping part, record how many of my vertices fall in it */
      for (np = firstPart; np <= lastPart; np++) {
        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1040 
1041 static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1042 {
1043   PetscFunctionBegin;
1044   part->ops->view      = PetscPartitionerView_Simple;
1045   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1046   part->ops->partition = PetscPartitionerPartition_Simple;
1047   PetscFunctionReturn(0);
1048 }
1049 
1050 /*MC
1051   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1052 
1053   Level: intermediate
1054 
1055 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1056 M*/
1057 
1058 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1059 {
1060   PetscPartitioner_Simple *p;
1061   PetscErrorCode           ierr;
1062 
1063   PetscFunctionBegin;
1064   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1065   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1066   part->data = p;
1067 
1068   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1069   PetscFunctionReturn(0);
1070 }
1071 
1072 static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1073 {
1074   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1075   PetscErrorCode          ierr;
1076 
1077   PetscFunctionBegin;
1078   ierr = PetscFree(p);CHKERRQ(ierr);
1079   PetscFunctionReturn(0);
1080 }
1081 
/* Print a one-line description of the gather partitioner to an ASCII viewer. */
static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscViewerFormat format;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  /* The viewer format is retrieved but not currently used */
  ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "Gather Graph Partitioner:\n");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1092 
1093 static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1094 {
1095   PetscBool      iascii;
1096   PetscErrorCode ierr;
1097 
1098   PetscFunctionBegin;
1099   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1100   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1101   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1102   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1103   PetscFunctionReturn(0);
1104 }
1105 
1106 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1107 {
1108   PetscInt       np;
1109   PetscErrorCode ierr;
1110 
1111   PetscFunctionBegin;
1112   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1113   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1114   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1115   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1116   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1117   PetscFunctionReturn(0);
1118 }
1119 
1120 static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1121 {
1122   PetscFunctionBegin;
1123   part->ops->view      = PetscPartitionerView_Gather;
1124   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1125   part->ops->partition = PetscPartitionerPartition_Gather;
1126   PetscFunctionReturn(0);
1127 }
1128 
1129 /*MC
1130   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1131 
1132   Level: intermediate
1133 
1134 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1135 M*/
1136 
1137 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1138 {
1139   PetscPartitioner_Gather *p;
1140   PetscErrorCode           ierr;
1141 
1142   PetscFunctionBegin;
1143   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1144   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1145   part->data = p;
1146 
1147   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1148   PetscFunctionReturn(0);
1149 }
1150 
1151 
1152 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1153 {
1154   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1155   PetscErrorCode          ierr;
1156 
1157   PetscFunctionBegin;
1158   ierr = PetscFree(p);CHKERRQ(ierr);
1159   PetscFunctionReturn(0);
1160 }
1161 
/* Print a one-line description of the Chaco partitioner to an ASCII viewer. */
static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscViewerFormat format;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  /* The viewer format is retrieved but not currently used */
  ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1172 
1173 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1174 {
1175   PetscBool      iascii;
1176   PetscErrorCode ierr;
1177 
1178   PetscFunctionBegin;
1179   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1180   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1181   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1182   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1183   PetscFunctionReturn(0);
1184 }
1185 
1186 #if defined(PETSC_HAVE_CHACO)
1187 #if defined(PETSC_HAVE_UNISTD_H)
1188 #include <unistd.h>
1189 #endif
1190 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1191 #include <chaco.h>
1192 #else
1193 /* Older versions of Chaco do not have an include file */
1194 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1195                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1196                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1197                        int mesh_dims[3], double *goal, int global_method, int local_method,
1198                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1199 #endif
1200 extern int FREE_GRAPH;
1201 #endif
1202 
/* Partition the (serial) CSR graph with the Chaco library.

   Chaco only runs serially, so in parallel all vertices must live on one rank;
   ranks without vertices return an empty partition. Chaco's diagnostic output
   goes to stdout, which is temporarily redirected into a pipe and reported back
   only when the library signals an error. */
static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int           *assignment;              /* Output partition */
#else
  short int     *assignment;              /* Output partition */
#endif
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
#if defined (PETSC_USE_DEBUG)
  /* In debug builds verify that at most one rank contributes vertices */
  {
    int ival,isum;
    PetscBool distributed;

    ival = (numVertices > 0);
    ierr = MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);CHKERRQ(ierr);
    distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
    if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
  }
#endif
  /* Ranks with no vertices produce an empty section and IS */
  if (!numVertices) {
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco expects 1-based vertex numbers: shift the adjacency in place and undo it below */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    fflush(stdout);
    /* NOTE(review): a single read() is assumed to capture all of Chaco's output;
       output exceeding the pipe buffer could block or be truncated -- verify */
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    /* Restore the original stdout and tear down the pipe */
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#else
  if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* Group vertex numbers by target partition for the output IS */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift applied above */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1323 
1324 static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1325 {
1326   PetscFunctionBegin;
1327   part->ops->view      = PetscPartitionerView_Chaco;
1328   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1329   part->ops->partition = PetscPartitionerPartition_Chaco;
1330   PetscFunctionReturn(0);
1331 }
1332 
1333 /*MC
1334   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1335 
1336   Level: intermediate
1337 
1338 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1339 M*/
1340 
1341 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1342 {
1343   PetscPartitioner_Chaco *p;
1344   PetscErrorCode          ierr;
1345 
1346   PetscFunctionBegin;
1347   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1348   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1349   part->data = p;
1350 
1351   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1352   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1353   PetscFunctionReturn(0);
1354 }
1355 
1356 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1357 {
1358   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1359   PetscErrorCode             ierr;
1360 
1361   PetscFunctionBegin;
1362   ierr = PetscFree(p);CHKERRQ(ierr);
1363   PetscFunctionReturn(0);
1364 }
1365 
/* Print a one-line description of the ParMetis partitioner to an ASCII viewer. */
static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscViewerFormat format;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  /* The viewer format is retrieved but not currently used */
  ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1376 
1377 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1378 {
1379   PetscBool      iascii;
1380   PetscErrorCode ierr;
1381 
1382   PetscFunctionBegin;
1383   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1384   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1385   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1386   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1387   PetscFunctionReturn(0);
1388 }
1389 
1390 #if defined(PETSC_HAVE_PARMETIS)
1391 #include <parmetis.h>
1392 #endif
1393 
1394 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1395 {
1396 #if defined(PETSC_HAVE_PARMETIS)
1397   MPI_Comm       comm;
1398   PetscSection   section;
1399   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1400   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1401   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1402   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1403   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1404   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1405   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1406   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1407   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1408   real_t        *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1409   real_t        *ubvec;                    /* The balance intolerance for vertex weights */
1410   PetscInt       options[64];              /* Options */
1411   /* Outputs */
1412   PetscInt       edgeCut;                  /* The number of edges cut by the partition */
1413   PetscInt       v, i, *assignment, *points;
1414   PetscMPIInt    size, rank, p;
1415   PetscErrorCode ierr;
1416 
1417   PetscFunctionBegin;
1418   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1419   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1420   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1421   options[0] = 0; /* Use all defaults */
1422   /* Calculate vertex distribution */
1423   ierr = PetscMalloc5(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
1424   vtxdist[0] = 0;
1425   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1426   for (p = 2; p <= size; ++p) {
1427     vtxdist[p] += vtxdist[p-1];
1428   }
1429   /* Calculate weights */
1430   for (p = 0; p < nparts; ++p) {
1431     tpwgts[p] = 1.0/nparts;
1432   }
1433   ubvec[0] = 1.05;
1434   /* Weight cells by dofs on cell by default */
1435   ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1436   if (section) {
1437     PetscInt cStart, cEnd, dof;
1438 
1439     ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
1440     for (v = cStart; v < cEnd; ++v) {
1441       ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1442       /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1443       /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1444       if (v-cStart < numVertices) vwgt[v-cStart] = PetscMax(dof, 1);
1445     }
1446   } else {
1447     for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1448   }
1449 
1450   if (nparts == 1) {
1451     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1452   } else {
1453     for (p = 0; !vtxdist[p+1] && p < size; ++p);
1454     if (vtxdist[p+1] == vtxdist[size]) {
1455       if (rank == p) {
1456         PetscStackPush("METIS_PartGraphKway");
1457         ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
1458         PetscStackPop;
1459         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1460       }
1461     } else {
1462       PetscStackPush("ParMETIS_V3_PartKway");
1463       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1464       PetscStackPop;
1465       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
1466     }
1467   }
1468   /* Convert to PetscSection+IS */
1469   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1470   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1471   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1472   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1473   for (p = 0, i = 0; p < nparts; ++p) {
1474     for (v = 0; v < nvtxs; ++v) {
1475       if (assignment[v] == p) points[i++] = v;
1476     }
1477   }
1478   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1479   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1480   ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
1481   PetscFunctionReturn(0);
1482 #else
1483   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1484 #endif
1485 }
1486 
1487 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1488 {
1489   PetscFunctionBegin;
1490   part->ops->view      = PetscPartitionerView_ParMetis;
1491   part->ops->destroy   = PetscPartitionerDestroy_ParMetis;
1492   part->ops->partition = PetscPartitionerPartition_ParMetis;
1493   PetscFunctionReturn(0);
1494 }
1495 
1496 /*MC
1497   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1498 
1499   Level: intermediate
1500 
1501 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1502 M*/
1503 
1504 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1505 {
1506   PetscPartitioner_ParMetis *p;
1507   PetscErrorCode          ierr;
1508 
1509   PetscFunctionBegin;
1510   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1511   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1512   part->data = p;
1513 
1514   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1515   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1516   PetscFunctionReturn(0);
1517 }
1518 
1519 /*@
1520   DMPlexGetPartitioner - Get the mesh partitioner
1521 
1522   Not collective
1523 
1524   Input Parameter:
1525 . dm - The DM
1526 
1527   Output Parameter:
1528 . part - The PetscPartitioner
1529 
1530   Level: developer
1531 
1532   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1533 
1534 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1535 @*/
1536 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1537 {
1538   DM_Plex       *mesh = (DM_Plex *) dm->data;
1539 
1540   PetscFunctionBegin;
1541   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1542   PetscValidPointer(part, 2);
1543   *part = mesh->partitioner;
1544   PetscFunctionReturn(0);
1545 }
1546 
1547 /*@
1548   DMPlexSetPartitioner - Set the mesh partitioner
1549 
  Logically collective on dm and part
1551 
1552   Input Parameters:
1553 + dm - The DM
1554 - part - The partitioner
1555 
1556   Level: developer
1557 
1558   Note: Any existing PetscPartitioner will be destroyed.
1559 
.seealso: DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1561 @*/
1562 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1563 {
1564   DM_Plex       *mesh = (DM_Plex *) dm->data;
1565   PetscErrorCode ierr;
1566 
1567   PetscFunctionBegin;
1568   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1569   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1570   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1571   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1572   mesh->partitioner = part;
1573   PetscFunctionReturn(0);
1574 }
1575 
1576 static PetscErrorCode DMPlexPartitionLabelClosure_Tree(DM dm, DMLabel label, PetscInt rank, PetscInt point, PetscBool up, PetscBool down)
1577 {
1578   PetscErrorCode ierr;
1579 
1580   PetscFunctionBegin;
1581   if (up) {
1582     PetscInt parent;
1583 
1584     ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
1585     if (parent != point) {
1586       PetscInt closureSize, *closure = NULL, i;
1587 
1588       ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
1589       for (i = 0; i < closureSize; i++) {
1590         PetscInt cpoint = closure[2*i];
1591 
1592         ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
1593         ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
1594       }
1595       ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
1596     }
1597   }
1598   if (down) {
1599     PetscInt numChildren;
1600     const PetscInt *children;
1601 
1602     ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
1603     if (numChildren) {
1604       PetscInt i;
1605 
1606       for (i = 0; i < numChildren; i++) {
1607         PetscInt cpoint = children[i];
1608 
1609         ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
1610         ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
1611       }
1612     }
1613   }
1614   PetscFunctionReturn(0);
1615 }
1616 
1617 /*@
1618   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
1619 
1620   Input Parameters:
1621 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1623 
1624   Level: developer
1625 
1626 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1627 @*/
PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, c, closureSize;
  PetscInt       *closure = NULL;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* Iterate over every rank value currently present in the label */
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];

    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* closure[] holds (point, orientation) pairs, hence the stride-2 walk below */
      ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      for (c = 0; c < closureSize*2; c += 2) {
        ierr = DMLabelSetValue(label, closure[c], rank);CHKERRQ(ierr);
        /* Also label tree-related parents/children for non-conforming meshes */
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,closure[c],PETSC_TRUE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  /* The closure buffer is reused across all Get calls and restored once at the end.
     NOTE(review): this passes point 0 to the restore -- assumes the point argument is
     ignored when returning a user-held closure buffer; confirm against
     DMPlexRestoreTransitiveClosure */
  if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);}
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1661 
1662 /*@
1663   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
1664 
1665   Input Parameters:
1666 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1668 
1669   Level: developer
1670 
1671 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1672 @*/
1673 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
1674 {
1675   IS              rankIS,   pointIS;
1676   const PetscInt *ranks,   *points;
1677   PetscInt        numRanks, numPoints, r, p, a, adjSize;
1678   PetscInt       *adj = NULL;
1679   PetscErrorCode  ierr;
1680 
1681   PetscFunctionBegin;
1682   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1683   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1684   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1685   for (r = 0; r < numRanks; ++r) {
1686     const PetscInt rank = ranks[r];
1687 
1688     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1689     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1690     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1691     for (p = 0; p < numPoints; ++p) {
1692       adjSize = PETSC_DETERMINE;
1693       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
1694       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
1695     }
1696     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1697     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1698   }
1699   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1700   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1701   ierr = PetscFree(adj);CHKERRQ(ierr);
1702   PetscFunctionReturn(0);
1703 }
1704 
1705 /*@
1706   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
1707 
1708   Input Parameters:
1709 + dm     - The DM
1710 - label  - DMLabel assinging ranks to remote roots
1711 
1712   Level: developer
1713 
1714   Note: This is required when generating multi-level overlaps to capture
1715   overlap points from non-neighbouring partitions.
1716 
1717 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1718 @*/
1719 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
1720 {
1721   MPI_Comm        comm;
1722   PetscMPIInt     rank;
1723   PetscSF         sfPoint;
1724   DMLabel         lblRoots, lblLeaves;
1725   IS              rankIS, pointIS;
1726   const PetscInt *ranks;
1727   PetscInt        numRanks, r;
1728   PetscErrorCode  ierr;
1729 
1730   PetscFunctionBegin;
1731   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1732   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1733   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1734   /* Pull point contributions from remote leaves into local roots */
1735   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
1736   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
1737   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1738   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1739   for (r = 0; r < numRanks; ++r) {
1740     const PetscInt remoteRank = ranks[r];
1741     if (remoteRank == rank) continue;
1742     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
1743     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1744     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1745   }
1746   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1747   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1748   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
1749   /* Push point contributions from roots into remote leaves */
1750   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
1751   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
1752   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1753   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1754   for (r = 0; r < numRanks; ++r) {
1755     const PetscInt remoteRank = ranks[r];
1756     if (remoteRank == rank) continue;
1757     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
1758     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1759     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1760   }
1761   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1762   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1763   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
1764   PetscFunctionReturn(0);
1765 }
1766 
1767 /*@
1768   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
1769 
1770   Input Parameters:
+ dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots
1777 
1778   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
1779   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
1780 
1781   Level: developer
1782 
1783 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1784 @*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           p, n, numNeighbors, ssize, l, nleaves;
  PetscSF            sfPoint;
  PetscSFNode       *rootPoints, *leafPoints;
  PetscSection       rootSection, leafSection;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners */
  /* rootSection is chartered over ranks [0, size): one "dof" per point destined
     for each target rank, so the section offsets bucket points by receiver */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &ssize);CHKERRQ(ierr);
  ierr = PetscMalloc1(ssize, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* If the point is a leaf in the point SF, record its actual owner from the
         SF graph; otherwise this process owns it.
         NOTE(review): PetscFindInt assumes 'local' is sorted -- verify for this SF */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap */
  /* Ship the (point, rank) pairs to the receiving processes via processSF */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Filter remote contributions (ovLeafPoints) into the overlapSF */
  ierr = PetscSectionGetStorageSize(leafSection, &ssize);CHKERRQ(ierr);
  for (p = 0; p < ssize; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1854 
1855 /*@
1856   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
1857 
1858   Input Parameters:
+ dm    - The DM
- label - DMLabel assigning ranks to remote roots

  Output Parameter:
. sf    - The star forest communication context encapsulating the defined mapping
1864 
1865   Note: The incoming label is a receiver mapping of remote points to their parent rank.
1866 
1867   Level: developer
1868 
1869 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
1870 @*/
PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
{
  PetscMPIInt     rank, size;
  PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
  PetscSFNode    *remotePoints;
  IS              remoteRootIS;
  const PetscInt *remoteRoots;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);

  /* Count the total number of leaf entries: one per labeled point across all ranks */
  for (numRemote = 0, n = 0; n < size; ++n) {
    ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
    numRemote += numPoints;
  }
  ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
  /* Put owned points first */
  ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
  if (numPoints > 0) {
    ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
    ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    for (p = 0; p < numPoints; p++) {
      remotePoints[idx].index = remoteRoots[p];
      remotePoints[idx].rank = rank;
      idx++;
    }
    ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
  }
  /* Now add remote points */
  /* Own-rank stratum is skipped here because it was already emitted above */
  for (n = 0; n < size; ++n) {
    ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
    if (numPoints <= 0 || n == rank) continue;
    ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
    ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    for (p = 0; p < numPoints; p++) {
      remotePoints[idx].index = remoteRoots[p];
      remotePoints[idx].rank = n;
      idx++;
    }
    ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
  }
  /* Roots span the full chart of the DM; remotePoints ownership passes to the SF */
  ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1921