xref: /petsc/src/dm/impls/plex/plexpartition.c (revision efd4aadf157bf1ba2d80c2be092fcf4247860003)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
/* Class id identifying PetscPartitioner objects; 0 until registered (presumably by DMInitializePackage() — confirm) */
PetscClassId PETSCPARTITIONER_CLASSID = 0;

/* Registry of partitioner constructors, populated via PetscPartitionerRegister() */
PetscFunctionList PetscPartitionerList              = NULL;
/* Guards one-time registration of the built-in partitioner types */
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
7 
8 PetscBool ChacoPartitionercite = PETSC_FALSE;
9 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
10                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
11                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
12                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
13                                "  isbn      = {0-89791-816-9},\n"
14                                "  pages     = {28},\n"
15                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
16                                "  publisher = {ACM Press},\n"
17                                "  address   = {New York},\n"
18                                "  year      = {1995}\n}\n";
19 
/* BibTeX citation for the ParMetis partitioner; the flag tracks whether it has been
   emitted (presumably via PetscCitationsRegister() at the use site — confirm) */
PetscBool ParMetisPartitionercite = PETSC_FALSE;
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";
28 
29 /*@C
30   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
31 
32   Input Parameters:
+ dm      - The mesh DM
34 - height  - Height of the strata from which to construct the graph
35 
36   Output Parameter:
37 + numVertices     - Number of vertices in the graph
38 . offsets         - Point offsets in the graph
39 . adjacency       - Point connectivity in the graph
40 - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.
41 
  The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
  DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
44   representation on the mesh.
45 
46   Level: developer
47 
48 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
49 @*/
50 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
51 {
52   PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
53   PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
54   IS             cellNumbering;
55   const PetscInt *cellNum;
56   PetscBool      useCone, useClosure;
57   PetscSection   section;
58   PetscSegBuffer adjBuffer;
59   PetscSF        sfPoint;
60   PetscMPIInt    rank;
61   PetscErrorCode ierr;
62 
63   PetscFunctionBegin;
64   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
65   ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
66   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
67   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
68   /* Build adjacency graph via a section/segbuffer */
69   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
70   ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
71   ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
72   /* Always use FVM adjacency to create partitioner graph */
73   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
74   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
75   ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
76   ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
77   ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);CHKERRQ(ierr);
78   if (globalNumbering) {
79     ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
80     *globalNumbering = cellNumbering;
81   }
82   ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
83   for (*numVertices = 0, p = pStart; p < pEnd; p++) {
84     /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
85     if (nroots > 0) {if (cellNum[p] < 0) continue;}
86     adjSize = PETSC_DETERMINE;
87     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
88     for (a = 0; a < adjSize; ++a) {
89       const PetscInt point = adj[a];
90       if (point != p && pStart <= point && point < pEnd) {
91         PetscInt *PETSC_RESTRICT pBuf;
92         ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
93         ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
94         *pBuf = point;
95       }
96     }
97     (*numVertices)++;
98   }
99   ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
100   ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
101   /* Derive CSR graph from section/segbuffer */
102   ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
103   ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
104   ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
105   for (idx = 0, p = pStart; p < pEnd; p++) {
106     if (nroots > 0) {if (cellNum[p] < 0) continue;}
107     ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
108   }
109   vOffsets[*numVertices] = size;
110   if (offsets) *offsets = vOffsets;
111   ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
112   {
113     ISLocalToGlobalMapping ltogCells;
114     PetscInt n, size, *cells_arr;
115     /* In parallel, apply a global cell numbering to the graph */
116     ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
117     ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
118     ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
119     ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
120     /* Convert to positive global cell numbers */
121     for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
122     ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
123     ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);CHKERRQ(ierr);
124     ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
125     ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
126   }
127   if (adjacency) *adjacency = graph;
128   /* Clean up */
129   ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
130   ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
131   ierr = PetscFree(adj);CHKERRQ(ierr);
132   PetscFunctionReturn(0);
133 }
134 
135 /*@C
136   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
137 
138   Collective
139 
140   Input Arguments:
141 + dm - The DMPlex
142 - cellHeight - The height of mesh points to treat as cells (default should be 0)
143 
144   Output Arguments:
145 + numVertices - The number of local vertices in the graph, or cells in the mesh.
146 . offsets     - The offset to the adjacency list for each cell
147 - adjacency   - The adjacency list for all cells
148 
149   Note: This is suitable for input to a mesh partitioner like ParMetis.
150 
151   Level: advanced
152 
153 .seealso: DMPlexCreate()
154 @*/
155 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
156 {
157   const PetscInt maxFaceCases = 30;
158   PetscInt       numFaceCases = 0;
159   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
160   PetscInt      *off, *adj;
161   PetscInt      *neighborCells = NULL;
162   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
163   PetscErrorCode ierr;
164 
165   PetscFunctionBegin;
166   /* For parallel partitioning, I think you have to communicate supports */
167   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
168   cellDim = dim - cellHeight;
169   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
170   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
171   if (cEnd - cStart == 0) {
172     if (numVertices) *numVertices = 0;
173     if (offsets)   *offsets   = NULL;
174     if (adjacency) *adjacency = NULL;
175     PetscFunctionReturn(0);
176   }
177   numCells  = cEnd - cStart;
178   faceDepth = depth - cellHeight;
179   if (dim == depth) {
180     PetscInt f, fStart, fEnd;
181 
182     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
183     /* Count neighboring cells */
184     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
185     for (f = fStart; f < fEnd; ++f) {
186       const PetscInt *support;
187       PetscInt        supportSize;
188       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
189       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
190       if (supportSize == 2) {
191         PetscInt numChildren;
192 
193         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
194         if (!numChildren) {
195           ++off[support[0]-cStart+1];
196           ++off[support[1]-cStart+1];
197         }
198       }
199     }
200     /* Prefix sum */
201     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
202     if (adjacency) {
203       PetscInt *tmp;
204 
205       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
206       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
207       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
208       /* Get neighboring cells */
209       for (f = fStart; f < fEnd; ++f) {
210         const PetscInt *support;
211         PetscInt        supportSize;
212         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
213         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
214         if (supportSize == 2) {
215           PetscInt numChildren;
216 
217           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
218           if (!numChildren) {
219             adj[tmp[support[0]-cStart]++] = support[1];
220             adj[tmp[support[1]-cStart]++] = support[0];
221           }
222         }
223       }
224 #if defined(PETSC_USE_DEBUG)
225       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
226 #endif
227       ierr = PetscFree(tmp);CHKERRQ(ierr);
228     }
229     if (numVertices) *numVertices = numCells;
230     if (offsets)   *offsets   = off;
231     if (adjacency) *adjacency = adj;
232     PetscFunctionReturn(0);
233   }
234   /* Setup face recognition */
235   if (faceDepth == 1) {
236     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
237 
238     for (c = cStart; c < cEnd; ++c) {
239       PetscInt corners;
240 
241       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
242       if (!cornersSeen[corners]) {
243         PetscInt nFV;
244 
245         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
246         cornersSeen[corners] = 1;
247 
248         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
249 
250         numFaceVertices[numFaceCases++] = nFV;
251       }
252     }
253   }
254   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
255   /* Count neighboring cells */
256   for (cell = cStart; cell < cEnd; ++cell) {
257     PetscInt numNeighbors = PETSC_DETERMINE, n;
258 
259     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
260     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
261     for (n = 0; n < numNeighbors; ++n) {
262       PetscInt        cellPair[2];
263       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
264       PetscInt        meetSize = 0;
265       const PetscInt *meet    = NULL;
266 
267       cellPair[0] = cell; cellPair[1] = neighborCells[n];
268       if (cellPair[0] == cellPair[1]) continue;
269       if (!found) {
270         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
271         if (meetSize) {
272           PetscInt f;
273 
274           for (f = 0; f < numFaceCases; ++f) {
275             if (numFaceVertices[f] == meetSize) {
276               found = PETSC_TRUE;
277               break;
278             }
279           }
280         }
281         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
282       }
283       if (found) ++off[cell-cStart+1];
284     }
285   }
286   /* Prefix sum */
287   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
288 
289   if (adjacency) {
290     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
291     /* Get neighboring cells */
292     for (cell = cStart; cell < cEnd; ++cell) {
293       PetscInt numNeighbors = PETSC_DETERMINE, n;
294       PetscInt cellOffset   = 0;
295 
296       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
297       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
298       for (n = 0; n < numNeighbors; ++n) {
299         PetscInt        cellPair[2];
300         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
301         PetscInt        meetSize = 0;
302         const PetscInt *meet    = NULL;
303 
304         cellPair[0] = cell; cellPair[1] = neighborCells[n];
305         if (cellPair[0] == cellPair[1]) continue;
306         if (!found) {
307           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
308           if (meetSize) {
309             PetscInt f;
310 
311             for (f = 0; f < numFaceCases; ++f) {
312               if (numFaceVertices[f] == meetSize) {
313                 found = PETSC_TRUE;
314                 break;
315               }
316             }
317           }
318           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
319         }
320         if (found) {
321           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
322           ++cellOffset;
323         }
324       }
325     }
326   }
327   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
328   if (numVertices) *numVertices = numCells;
329   if (offsets)   *offsets   = off;
330   if (adjacency) *adjacency = adj;
331   PetscFunctionReturn(0);
332 }
333 
334 /*@C
335   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
336 
337   Not Collective
338 
339   Input Parameters:
340 + name        - The name of a new user-defined creation routine
341 - create_func - The creation routine itself
342 
343   Notes:
344   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
345 
346   Sample usage:
347 .vb
348     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
349 .ve
350 
351   Then, your PetscPartitioner type can be chosen with the procedural interface via
352 .vb
353     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
354     PetscPartitionerSetType(PetscPartitioner, "my_part");
355 .ve
356    or at runtime via the option
357 .vb
358     -petscpartitioner_type my_part
359 .ve
360 
361   Level: advanced
362 
363 .keywords: PetscPartitioner, register
364 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
365 
366 @*/
367 PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
368 {
369   PetscErrorCode ierr;
370 
371   PetscFunctionBegin;
372   ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
373   PetscFunctionReturn(0);
374 }
375 
376 /*@C
377   PetscPartitionerSetType - Builds a particular PetscPartitioner
378 
379   Collective on PetscPartitioner
380 
381   Input Parameters:
382 + part - The PetscPartitioner object
383 - name - The kind of partitioner
384 
385   Options Database Key:
386 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
387 
388   Level: intermediate
389 
390 .keywords: PetscPartitioner, set, type
391 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
392 @*/
393 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
394 {
395   PetscErrorCode (*r)(PetscPartitioner);
396   PetscBool      match;
397   PetscErrorCode ierr;
398 
399   PetscFunctionBegin;
400   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
401   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
402   if (match) PetscFunctionReturn(0);
403 
404   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
405   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
406   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
407 
408   if (part->ops->destroy) {
409     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
410     part->ops->destroy = NULL;
411   }
412   ierr = (*r)(part);CHKERRQ(ierr);
413   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
414   PetscFunctionReturn(0);
415 }
416 
417 /*@C
418   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
419 
420   Not Collective
421 
422   Input Parameter:
423 . part - The PetscPartitioner
424 
425   Output Parameter:
426 . name - The PetscPartitioner type name
427 
428   Level: intermediate
429 
430 .keywords: PetscPartitioner, get, type, name
431 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
432 @*/
433 PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
434 {
435   PetscErrorCode ierr;
436 
437   PetscFunctionBegin;
438   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
439   PetscValidPointer(name, 2);
440   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
441   *name = ((PetscObject) part)->type_name;
442   PetscFunctionReturn(0);
443 }
444 
445 /*@C
446   PetscPartitionerView - Views a PetscPartitioner
447 
448   Collective on PetscPartitioner
449 
  Input Parameters:
451 + part - the PetscPartitioner object to view
452 - v    - the viewer
453 
454   Level: developer
455 
456 .seealso: PetscPartitionerDestroy()
457 @*/
458 PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
459 {
460   PetscErrorCode ierr;
461 
462   PetscFunctionBegin;
463   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
464   if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
465   if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
466   PetscFunctionReturn(0);
467 }
468 
469 static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType)
470 {
471   PetscFunctionBegin;
472   if (!currentType) {
473 #if defined(PETSC_HAVE_CHACO)
474     *defaultType = PETSCPARTITIONERCHACO;
475 #elif defined(PETSC_HAVE_PARMETIS)
476     *defaultType = PETSCPARTITIONERPARMETIS;
477 #else
478     *defaultType = PETSCPARTITIONERSIMPLE;
479 #endif
480   } else {
481     *defaultType = currentType;
482   }
483   PetscFunctionReturn(0);
484 }
485 
/* Sets the partitioner's type from the options database.  The priority is:
   -petscpartitioner_type if given, else the already-set type, else the
   compile-time default from PetscPartitionerGetDefaultType(). */
PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
{
  const char    *defaultType;
  char           name[256];
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* The current type name, if any, is used as the default shown for the option */
  ierr = PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);CHKERRQ(ierr);
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);

  /* PetscObjectOptionsBegin() declares PetscOptionsObject used by the calls below */
  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);CHKERRQ(ierr);
  if (flg) {
    /* The user chose a type on the command line */
    ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
  } else if (!((PetscObject) part)->type_name) {
    /* No option given and no type set yet: fall back to the default */
    ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
  }
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
508 
509 /*@
510   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
511 
512   Collective on PetscPartitioner
513 
514   Input Parameter:
515 . part - the PetscPartitioner object to set options for
516 
517   Level: developer
518 
519 .seealso: PetscPartitionerView()
520 @*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Determine the type first so that the type-specific hook below is available */
  ierr = PetscPartitionerSetTypeFromOptions_Internal(part);CHKERRQ(ierr);

  /* PetscObjectOptionsBegin() declares PetscOptionsObject used by the calls below */
  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  if (part->ops->setfromoptions) {ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);}
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  /* Honor -petscpartitioner_view after all options are processed */
  ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
537 
538 /*@C
539   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
540 
541   Collective on PetscPartitioner
542 
543   Input Parameter:
544 . part - the PetscPartitioner object to setup
545 
546   Level: developer
547 
548 .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
549 @*/
550 PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
551 {
552   PetscErrorCode ierr;
553 
554   PetscFunctionBegin;
555   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
556   if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
557   PetscFunctionReturn(0);
558 }
559 
560 /*@
561   PetscPartitionerDestroy - Destroys a PetscPartitioner object
562 
563   Collective on PetscPartitioner
564 
565   Input Parameter:
566 . part - the PetscPartitioner object to destroy
567 
568   Level: developer
569 
570 .seealso: PetscPartitionerView()
571 @*/
572 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
573 {
574   PetscErrorCode ierr;
575 
576   PetscFunctionBegin;
577   if (!*part) PetscFunctionReturn(0);
578   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
579 
580   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
581   ((PetscObject) (*part))->refct = 0;
582 
583   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
584   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
585   PetscFunctionReturn(0);
586 }
587 
588 /*@
589   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
590 
591   Collective on MPI_Comm
592 
593   Input Parameter:
594 . comm - The communicator for the PetscPartitioner object
595 
596   Output Parameter:
597 . part - The PetscPartitioner object
598 
599   Level: beginner
600 
601 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
602 @*/
603 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
604 {
605   PetscPartitioner p;
606   const char       *partitionerType = NULL;
607   PetscErrorCode   ierr;
608 
609   PetscFunctionBegin;
610   PetscValidPointer(part, 2);
611   *part = NULL;
612   ierr = DMInitializePackage();CHKERRQ(ierr);
613 
614   ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
615   ierr = PetscPartitionerGetDefaultType(NULL,&partitionerType);CHKERRQ(ierr);
616   ierr = PetscPartitionerSetType(p,partitionerType);CHKERRQ(ierr);
617 
618   *part = p;
619   PetscFunctionReturn(0);
620 }
621 
622 /*@
623   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
624 
625   Collective on DM
626 
627   Input Parameters:
628 + part    - The PetscPartitioner
629 - dm      - The mesh DM
630 
631   Output Parameters:
632 + partSection     - The PetscSection giving the division of points by partition
633 - partition       - The list of points by partition
634 
635   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
636 
637   Level: developer
638 
.seealso: DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
640 @*/
641 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
642 {
643   PetscMPIInt    size;
644   PetscErrorCode ierr;
645 
646   PetscFunctionBegin;
647   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
648   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
649   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
650   PetscValidPointer(partition, 5);
651   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
652   if (size == 1) {
653     PetscInt *points;
654     PetscInt  cStart, cEnd, c;
655 
656     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
657     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
658     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
659     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
660     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
661     for (c = cStart; c < cEnd; ++c) points[c] = c;
662     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
663     PetscFunctionReturn(0);
664   }
665   if (part->height == 0) {
666     PetscInt numVertices;
667     PetscInt *start     = NULL;
668     PetscInt *adjacency = NULL;
669     IS       globalNumbering;
670 
671     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
672     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
673     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
674     ierr = PetscFree(start);CHKERRQ(ierr);
675     ierr = PetscFree(adjacency);CHKERRQ(ierr);
676     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
677       const PetscInt *globalNum;
678       const PetscInt *partIdx;
679       PetscInt *map, cStart, cEnd;
680       PetscInt *adjusted, i, localSize, offset;
681       IS    newPartition;
682 
683       ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
684       ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
685       ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
686       ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
687       ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
688       ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
689       for (i = cStart, offset = 0; i < cEnd; i++) {
690         if (globalNum[i - cStart] >= 0) map[offset++] = i;
691       }
692       for (i = 0; i < localSize; i++) {
693         adjusted[i] = map[partIdx[i]];
694       }
695       ierr = PetscFree(map);CHKERRQ(ierr);
696       ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
697       ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
698       ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
699       ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
700       ierr = ISDestroy(partition);CHKERRQ(ierr);
701       *partition = newPartition;
702     }
703   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
704   PetscFunctionReturn(0);
705 
706 }
707 
/* Destroys the shell partitioner's type-specific data: the PetscSection and IS
   installed by PetscPartitionerShellSetPartition(), then the data struct itself. */
static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
719 
720 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
721 {
722   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
723   PetscViewerFormat       format;
724   PetscErrorCode          ierr;
725 
726   PetscFunctionBegin;
727   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
728   ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
729   if (p->random) {
730     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
731     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
732     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
733   }
734   PetscFunctionReturn(0);
735 }
736 
737 static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
738 {
739   PetscBool      iascii;
740   PetscErrorCode ierr;
741 
742   PetscFunctionBegin;
743   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
744   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
745   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
746   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
747   PetscFunctionReturn(0);
748 }
749 
/* Processes options for the shell partitioner.  Currently only
   -petscpartitioner_shell_random, which requests a random partition. */
static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitionerShell Options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
761 
762 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
763 {
764   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
765   PetscInt                np;
766   PetscErrorCode          ierr;
767 
768   PetscFunctionBegin;
769   if (p->random) {
770     PetscRandom r;
771     PetscInt   *sizes, *points, v, p;
772     PetscMPIInt rank;
773 
774     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
775     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
776     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr);
777     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
778     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
779     for (v = 0; v < numVertices; ++v) {points[v] = v;}
780     for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);}
781     for (v = numVertices-1; v > 0; --v) {
782       PetscReal val;
783       PetscInt  w, tmp;
784 
785       ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));CHKERRQ(ierr);
786       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
787       w    = PetscFloorReal(val);
788       tmp       = points[v];
789       points[v] = points[w];
790       points[w] = tmp;
791     }
792     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
793     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
794     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
795   }
796   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
797   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
798   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
799   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
800   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
801   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
802   *partition = p->partition;
803   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
804   PetscFunctionReturn(0);
805 }
806 
807 static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
808 {
809   PetscFunctionBegin;
810   part->ops->view           = PetscPartitionerView_Shell;
811   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
812   part->ops->destroy        = PetscPartitionerDestroy_Shell;
813   part->ops->partition      = PetscPartitionerPartition_Shell;
814   PetscFunctionReturn(0);
815 }
816 
817 /*MC
818   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
819 
820   Level: intermediate
821 
822 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
823 M*/
824 
825 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
826 {
827   PetscPartitioner_Shell *p;
828   PetscErrorCode          ierr;
829 
830   PetscFunctionBegin;
831   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
832   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
833   part->data = p;
834 
835   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
836   p->random = PETSC_FALSE;
837   PetscFunctionReturn(0);
838 }
839 
840 /*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
842 
843   Collective on Part
844 
845   Input Parameters:
846 + part     - The PetscPartitioner
847 . size - The number of partitions
848 . sizes    - array of size size (or NULL) providing the number of points in each partition
849 - points   - array of size sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)
850 
851   Level: developer
852 
853   Notes:
854 
855     It is safe to free the sizes and points arrays after use in this routine.
856 
857 .seealso DMPlexDistribute(), PetscPartitionerCreate()
858 @*/
859 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
860 {
861   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
862   PetscInt                proc, numPoints;
863   PetscErrorCode          ierr;
864 
865   PetscFunctionBegin;
866   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
867   if (sizes)  {PetscValidPointer(sizes, 3);}
868   if (points) {PetscValidPointer(points, 4);}
869   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
870   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
871   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
872   ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
873   if (sizes) {
874     for (proc = 0; proc < size; ++proc) {
875       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
876     }
877   }
878   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
879   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
880   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
881   PetscFunctionReturn(0);
882 }
883 
884 /*@
885   PetscPartitionerShellSetRandom - Set the flag to use a random partition
886 
887   Collective on Part
888 
889   Input Parameters:
890 + part   - The PetscPartitioner
891 - random - The flag to use a random partition
892 
893   Level: intermediate
894 
895 .seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
896 @*/
897 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
898 {
899   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
900 
901   PetscFunctionBegin;
902   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
903   p->random = random;
904   PetscFunctionReturn(0);
905 }
906 
907 /*@
908   PetscPartitionerShellGetRandom - get the flag to use a random partition
909 
  Not collective

  Input Parameter:
. part   - The PetscPartitioner

  Output Parameter:
916 . random - The flag to use a random partition
917 
918   Level: intermediate
919 
920 .seealso PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
921 @*/
922 PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
923 {
924   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
925 
926   PetscFunctionBegin;
927   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
928   PetscValidPointer(random, 2);
929   *random = p->random;
930   PetscFunctionReturn(0);
931 }
932 
933 static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
934 {
935   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
936   PetscErrorCode          ierr;
937 
938   PetscFunctionBegin;
939   ierr = PetscFree(p);CHKERRQ(ierr);
940   PetscFunctionReturn(0);
941 }
942 
943 static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
944 {
945   PetscViewerFormat format;
946   PetscErrorCode    ierr;
947 
948   PetscFunctionBegin;
949   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
950   ierr = PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");CHKERRQ(ierr);
951   PetscFunctionReturn(0);
952 }
953 
954 static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
955 {
956   PetscBool      iascii;
957   PetscErrorCode ierr;
958 
959   PetscFunctionBegin;
960   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
961   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
962   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
963   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
964   PetscFunctionReturn(0);
965 }
966 
/* Partition the graph into nparts contiguous chunks of (nearly) equal size.
   The local vertices keep their order (the output IS is the identity stride);
   only the part sizes in partSection are computed. In parallel, the global
   vertex range is split so that part np owns global vertices
   [np*floor(N/nparts) + min(N%nparts, np), ...), i.e. the first N%nparts
   parts get one extra vertex, and each rank records how many of ITS local
   vertices fall in each part. */
static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)dm);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  /* Local vertex order is unchanged: identity permutation */
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial: part np gets floor(n/nparts) vertices plus one if np < n % nparts */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  }
  else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Prefix-sum the local vertex counts to get each rank's global offset */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    /* This rank owns global vertices [myFirst, myLast] (inclusive) */
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      /* The first rem parts have pBig vertices; the rest have pSmall */
      PetscInt rem = nvGlobal % nparts;
      PetscInt pSmall = nvGlobal/nparts;
      PetscInt pBig = nvGlobal/nparts + 1;


      if (rem) {
        /* Find which part contains myFirst/myLast: first assume all parts are
           "big"; if that lands past the rem big parts, correct with the small
           part size for the remainder of the offset */
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        }
        else {
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        }
        else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      }
      else {
        /* Uniform part size: direct division */
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      /* For each part overlapping this rank's range, record the overlap size */
      for (np = firstPart; np <= lastPart; np++) {
        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1040 
1041 static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1042 {
1043   PetscFunctionBegin;
1044   part->ops->view      = PetscPartitionerView_Simple;
1045   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1046   part->ops->partition = PetscPartitionerPartition_Simple;
1047   PetscFunctionReturn(0);
1048 }
1049 
1050 /*MC
1051   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1052 
1053   Level: intermediate
1054 
1055 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1056 M*/
1057 
1058 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1059 {
1060   PetscPartitioner_Simple *p;
1061   PetscErrorCode           ierr;
1062 
1063   PetscFunctionBegin;
1064   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1065   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1066   part->data = p;
1067 
1068   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1069   PetscFunctionReturn(0);
1070 }
1071 
1072 static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1073 {
1074   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1075   PetscErrorCode          ierr;
1076 
1077   PetscFunctionBegin;
1078   ierr = PetscFree(p);CHKERRQ(ierr);
1079   PetscFunctionReturn(0);
1080 }
1081 
1082 static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
1083 {
1084   PetscViewerFormat format;
1085   PetscErrorCode    ierr;
1086 
1087   PetscFunctionBegin;
1088   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1089   ierr = PetscViewerASCIIPrintf(viewer, "Gather Graph Partitioner:\n");CHKERRQ(ierr);
1090   PetscFunctionReturn(0);
1091 }
1092 
1093 static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1094 {
1095   PetscBool      iascii;
1096   PetscErrorCode ierr;
1097 
1098   PetscFunctionBegin;
1099   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1100   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1101   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1102   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1103   PetscFunctionReturn(0);
1104 }
1105 
1106 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1107 {
1108   PetscInt       np;
1109   PetscErrorCode ierr;
1110 
1111   PetscFunctionBegin;
1112   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1113   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1114   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1115   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1116   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1117   PetscFunctionReturn(0);
1118 }
1119 
1120 static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1121 {
1122   PetscFunctionBegin;
1123   part->ops->view      = PetscPartitionerView_Gather;
1124   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1125   part->ops->partition = PetscPartitionerPartition_Gather;
1126   PetscFunctionReturn(0);
1127 }
1128 
1129 /*MC
1130   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1131 
1132   Level: intermediate
1133 
1134 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1135 M*/
1136 
1137 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1138 {
1139   PetscPartitioner_Gather *p;
1140   PetscErrorCode           ierr;
1141 
1142   PetscFunctionBegin;
1143   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1144   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1145   part->data = p;
1146 
1147   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1148   PetscFunctionReturn(0);
1149 }
1150 
1151 
1152 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1153 {
1154   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1155   PetscErrorCode          ierr;
1156 
1157   PetscFunctionBegin;
1158   ierr = PetscFree(p);CHKERRQ(ierr);
1159   PetscFunctionReturn(0);
1160 }
1161 
1162 static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1163 {
1164   PetscViewerFormat format;
1165   PetscErrorCode    ierr;
1166 
1167   PetscFunctionBegin;
1168   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1169   ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
1170   PetscFunctionReturn(0);
1171 }
1172 
1173 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1174 {
1175   PetscBool      iascii;
1176   PetscErrorCode ierr;
1177 
1178   PetscFunctionBegin;
1179   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1180   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1181   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1182   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1183   PetscFunctionReturn(0);
1184 }
1185 
1186 #if defined(PETSC_HAVE_CHACO)
1187 #if defined(PETSC_HAVE_UNISTD_H)
1188 #include <unistd.h>
1189 #endif
1190 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1191 #include <chaco.h>
1192 #else
1193 /* Older versions of Chaco do not have an include file */
1194 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1195                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1196                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1197                        int mesh_dims[3], double *goal, int global_method, int local_method,
1198                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1199 #endif
1200 extern int FREE_GRAPH;
1201 #endif
1202 
/* Partition the (serial) graph with the Chaco library.
   Chaco only runs on an undistributed graph: all vertices must live on a
   single rank (checked in debug builds). Chaco writes diagnostics to stdout,
   which is temporarily redirected into a pipe so any error text can be
   reported via SETERRQ. The resulting per-vertex assignment is converted to
   a PetscSection of part sizes plus an IS grouping points by part. */
static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int           *assignment;              /* Output partition */
#else
  short int     *assignment;              /* Output partition */
#endif
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
#if defined (PETSC_USE_DEBUG)
  {
    /* More than one rank with vertices means the graph is distributed, which Chaco cannot handle */
    int ival,isum;
    PetscBool distributed;

    ival = (numVertices > 0);
    ierr = MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);CHKERRQ(ierr);
    distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
    if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
  }
#endif
  if (!numVertices) {
    /* Ranks with no vertices return an empty partition but still set up the section */
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco expects 1-based vertex numbers; shift in place and undo at the end */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    /* Restore stdout and recover any Chaco diagnostics from the pipe */
    char msgLog[10000];
    int  count;

    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#else
  if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* Group the points by assigned part: part 0's points first, then part 1's, ... */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift applied above */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1323 
1324 static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1325 {
1326   PetscFunctionBegin;
1327   part->ops->view      = PetscPartitionerView_Chaco;
1328   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1329   part->ops->partition = PetscPartitionerPartition_Chaco;
1330   PetscFunctionReturn(0);
1331 }
1332 
1333 /*MC
1334   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1335 
1336   Level: intermediate
1337 
1338 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1339 M*/
1340 
1341 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1342 {
1343   PetscPartitioner_Chaco *p;
1344   PetscErrorCode          ierr;
1345 
1346   PetscFunctionBegin;
1347   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1348   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1349   part->data = p;
1350 
1351   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1352   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1353   PetscFunctionReturn(0);
1354 }
1355 
1356 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1357 {
1358   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1359   PetscErrorCode             ierr;
1360 
1361   PetscFunctionBegin;
1362   ierr = PetscFree(p);CHKERRQ(ierr);
1363   PetscFunctionReturn(0);
1364 }
1365 
1366 static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1367 {
1368   PetscViewerFormat format;
1369   PetscErrorCode    ierr;
1370 
1371   PetscFunctionBegin;
1372   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1373   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
1374   PetscFunctionReturn(0);
1375 }
1376 
1377 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1378 {
1379   PetscBool      iascii;
1380   PetscErrorCode ierr;
1381 
1382   PetscFunctionBegin;
1383   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1384   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1385   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1386   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1387   PetscFunctionReturn(0);
1388 }
1389 
1390 #if defined(PETSC_HAVE_PARMETIS)
1391 #include <parmetis.h>
1392 #endif
1393 
1394 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1395 {
1396 #if defined(PETSC_HAVE_PARMETIS)
1397   MPI_Comm       comm;
1398   PetscSection   section;
1399   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1400   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1401   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1402   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1403   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1404   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1405   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1406   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1407   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1408   PetscReal     *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1409   PetscReal     *ubvec;                    /* The balance intolerance for vertex weights */
1410   PetscInt       options[5];               /* Options */
1411   /* Outputs */
1412   PetscInt       edgeCut;                  /* The number of edges cut by the partition */
1413   PetscInt      *assignment, *points;
1414   PetscMPIInt    rank, p, v, i;
1415   PetscErrorCode ierr;
1416 
1417   PetscFunctionBegin;
1418   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1419   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1420   options[0] = 0; /* Use all defaults */
1421   /* Calculate vertex distribution */
1422   ierr = PetscMalloc5(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
1423   vtxdist[0] = 0;
1424   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1425   for (p = 2; p <= nparts; ++p) {
1426     vtxdist[p] += vtxdist[p-1];
1427   }
1428   /* Calculate weights */
1429   for (p = 0; p < nparts; ++p) {
1430     tpwgts[p] = 1.0/nparts;
1431   }
1432   ubvec[0] = 1.05;
1433   /* Weight cells by dofs on cell by default */
1434   ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1435   if (section) {
1436     PetscInt cStart, cEnd, dof;
1437 
1438     ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
1439     for (v = cStart; v < cEnd; ++v) {
1440       ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1441       /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1442       /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1443       if (v-cStart < numVertices) vwgt[v-cStart] = PetscMax(dof, 1);
1444     }
1445   } else {
1446     for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1447   }
1448 
1449   if (nparts == 1) {
1450     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1451   } else {
1452     if (vtxdist[1] == vtxdist[nparts]) {
1453       if (!rank) {
1454         PetscStackPush("METIS_PartGraphKway");
1455         ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
1456         PetscStackPop;
1457         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1458       }
1459     } else {
1460       PetscStackPush("ParMETIS_V3_PartKway");
1461       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1462       PetscStackPop;
1463       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
1464     }
1465   }
1466   /* Convert to PetscSection+IS */
1467   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1468   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1469   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1470   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1471   for (p = 0, i = 0; p < nparts; ++p) {
1472     for (v = 0; v < nvtxs; ++v) {
1473       if (assignment[v] == p) points[i++] = v;
1474     }
1475   }
1476   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1477   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1478   ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
1479   PetscFunctionReturn(0);
1480 #else
1481   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1482 #endif
1483 }
1484 
1485 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1486 {
1487   PetscFunctionBegin;
1488   part->ops->view      = PetscPartitionerView_ParMetis;
1489   part->ops->destroy   = PetscPartitionerDestroy_ParMetis;
1490   part->ops->partition = PetscPartitionerPartition_ParMetis;
1491   PetscFunctionReturn(0);
1492 }
1493 
1494 /*MC
1495   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1496 
1497   Level: intermediate
1498 
1499 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1500 M*/
1501 
1502 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1503 {
1504   PetscPartitioner_ParMetis *p;
1505   PetscErrorCode          ierr;
1506 
1507   PetscFunctionBegin;
1508   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1509   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1510   part->data = p;
1511 
1512   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1513   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1514   PetscFunctionReturn(0);
1515 }
1516 
1517 /*@
1518   DMPlexGetPartitioner - Get the mesh partitioner
1519 
1520   Not collective
1521 
1522   Input Parameter:
1523 . dm - The DM
1524 
1525   Output Parameter:
1526 . part - The PetscPartitioner
1527 
1528   Level: developer
1529 
1530   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1531 
.seealso: DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1533 @*/
1534 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1535 {
1536   DM_Plex       *mesh = (DM_Plex *) dm->data;
1537 
1538   PetscFunctionBegin;
1539   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1540   PetscValidPointer(part, 2);
1541   *part = mesh->partitioner;
1542   PetscFunctionReturn(0);
1543 }
1544 
1545 /*@
1546   DMPlexSetPartitioner - Set the mesh partitioner
1547 
  Logically collective on dm and part
1549 
1550   Input Parameters:
1551 + dm - The DM
1552 - part - The partitioner
1553 
1554   Level: developer
1555 
1556   Note: Any existing PetscPartitioner will be destroyed.
1557 
.seealso: DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1559 @*/
1560 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1561 {
1562   DM_Plex       *mesh = (DM_Plex *) dm->data;
1563   PetscErrorCode ierr;
1564 
1565   PetscFunctionBegin;
1566   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1567   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1568   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1569   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1570   mesh->partitioner = part;
1571   PetscFunctionReturn(0);
1572 }
1573 
1574 static PetscErrorCode DMPlexPartitionLabelClosure_Tree(DM dm, DMLabel label, PetscInt rank, PetscInt point, PetscBool up, PetscBool down)
1575 {
1576   PetscErrorCode ierr;
1577 
1578   PetscFunctionBegin;
1579   if (up) {
1580     PetscInt parent;
1581 
1582     ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
1583     if (parent != point) {
1584       PetscInt closureSize, *closure = NULL, i;
1585 
1586       ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
1587       for (i = 0; i < closureSize; i++) {
1588         PetscInt cpoint = closure[2*i];
1589 
1590         ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
1591         ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
1592       }
1593       ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
1594     }
1595   }
1596   if (down) {
1597     PetscInt numChildren;
1598     const PetscInt *children;
1599 
1600     ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
1601     if (numChildren) {
1602       PetscInt i;
1603 
1604       for (i = 0; i < numChildren; i++) {
1605         PetscInt cpoint = children[i];
1606 
1607         ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
1608         ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
1609       }
1610     }
1611   }
1612   PetscFunctionReturn(0);
1613 }
1614 
1615 /*@
1616   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
1617 
1618   Input Parameters:
1619 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1621 
1622   Level: developer
1623 
1624 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1625 @*/
1626 PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
1627 {
1628   IS              rankIS,   pointIS;
1629   const PetscInt *ranks,   *points;
1630   PetscInt        numRanks, numPoints, r, p, c, closureSize;
1631   PetscInt       *closure = NULL;
1632   PetscErrorCode  ierr;
1633 
1634   PetscFunctionBegin;
1635   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1636   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1637   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1638   for (r = 0; r < numRanks; ++r) {
1639     const PetscInt rank = ranks[r];
1640 
1641     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1642     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1643     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1644     for (p = 0; p < numPoints; ++p) {
1645       ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
1646       for (c = 0; c < closureSize*2; c += 2) {
1647         ierr = DMLabelSetValue(label, closure[c], rank);CHKERRQ(ierr);
1648         ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,closure[c],PETSC_TRUE,PETSC_TRUE);CHKERRQ(ierr);
1649       }
1650     }
1651     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1652     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1653   }
1654   if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);}
1655   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1656   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1657   PetscFunctionReturn(0);
1658 }
1659 
1660 /*@
1661   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
1662 
1663   Input Parameters:
1664 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1666 
1667   Level: developer
1668 
1669 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1670 @*/
1671 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
1672 {
1673   IS              rankIS,   pointIS;
1674   const PetscInt *ranks,   *points;
1675   PetscInt        numRanks, numPoints, r, p, a, adjSize;
1676   PetscInt       *adj = NULL;
1677   PetscErrorCode  ierr;
1678 
1679   PetscFunctionBegin;
1680   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1681   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1682   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1683   for (r = 0; r < numRanks; ++r) {
1684     const PetscInt rank = ranks[r];
1685 
1686     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1687     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1688     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1689     for (p = 0; p < numPoints; ++p) {
1690       adjSize = PETSC_DETERMINE;
1691       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
1692       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
1693     }
1694     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1695     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1696   }
1697   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1698   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1699   ierr = PetscFree(adj);CHKERRQ(ierr);
1700   PetscFunctionReturn(0);
1701 }
1702 
1703 /*@
1704   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
1705 
1706   Input Parameters:
1707 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1709 
1710   Level: developer
1711 
1712   Note: This is required when generating multi-level overlaps to capture
1713   overlap points from non-neighbouring partitions.
1714 
1715 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1716 @*/
1717 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
1718 {
1719   MPI_Comm        comm;
1720   PetscMPIInt     rank;
1721   PetscSF         sfPoint;
1722   DMLabel         lblRoots, lblLeaves;
1723   IS              rankIS, pointIS;
1724   const PetscInt *ranks;
1725   PetscInt        numRanks, r;
1726   PetscErrorCode  ierr;
1727 
1728   PetscFunctionBegin;
1729   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1730   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1731   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1732   /* Pull point contributions from remote leaves into local roots */
1733   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
1734   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
1735   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1736   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1737   for (r = 0; r < numRanks; ++r) {
1738     const PetscInt remoteRank = ranks[r];
1739     if (remoteRank == rank) continue;
1740     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
1741     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1742     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1743   }
1744   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1745   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1746   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
1747   /* Push point contributions from roots into remote leaves */
1748   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
1749   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
1750   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1751   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1752   for (r = 0; r < numRanks; ++r) {
1753     const PetscInt remoteRank = ranks[r];
1754     if (remoteRank == rank) continue;
1755     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
1756     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1757     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1758   }
1759   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1760   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1761   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
1762   PetscFunctionReturn(0);
1763 }
1764 
1765 /*@
1766   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
1767 
  Input Parameters:
+ dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots
1775 
1776   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
1777   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
1778 
1779   Level: developer
1780 
1781 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1782 @*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           p, n, numNeighbors, ssize, l, nleaves;
  PetscSF            sfPoint;
  PetscSFNode       *rootPoints, *leafPoints;
  PetscSection       rootSection, leafSection;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners: build a section whose
     chart is the set of process ranks and whose dof count per rank is the
     number of points this process sends to that rank */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &ssize);CHKERRQ(ierr);
  ierr = PetscMalloc1(ssize, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  /* Fill rootPoints with (index, rank) pairs; if a labeled point is a leaf of
     the point SF, substitute its actual remote owner, otherwise it is owned
     locally and we record (point, rank) for this process */
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p; /* NOTE: intentionally shadows the outer p */

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* local == NULL means the leaves are contiguous from 0, so no lookup is possible here */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap: ship each (point, rank) pair to its target process */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Record every received (point, rank) pair in the output label */
  ierr = PetscSectionGetStorageSize(leafSection, &ssize);CHKERRQ(ierr);
  for (p = 0; p < ssize; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1852 
1853 /*@
1854   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
1855 
  Input Parameters:
+ dm    - The DM
- label - DMLabel assigning ranks to remote roots

  Output Parameter:
. sf    - The star forest communication context encapsulating the defined mapping
1862 
1863   Note: The incoming label is a receiver mapping of remote points to their parent rank.
1864 
1865   Level: developer
1866 
1867 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
1868 @*/
1869 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
1870 {
1871   PetscMPIInt     rank, size;
1872   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
1873   PetscSFNode    *remotePoints;
1874   IS              remoteRootIS;
1875   const PetscInt *remoteRoots;
1876   PetscErrorCode ierr;
1877 
1878   PetscFunctionBegin;
1879   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1880   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
1881 
1882   for (numRemote = 0, n = 0; n < size; ++n) {
1883     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1884     numRemote += numPoints;
1885   }
1886   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
1887   /* Put owned points first */
1888   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
1889   if (numPoints > 0) {
1890     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
1891     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1892     for (p = 0; p < numPoints; p++) {
1893       remotePoints[idx].index = remoteRoots[p];
1894       remotePoints[idx].rank = rank;
1895       idx++;
1896     }
1897     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1898     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1899   }
1900   /* Now add remote points */
1901   for (n = 0; n < size; ++n) {
1902     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1903     if (numPoints <= 0 || n == rank) continue;
1904     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
1905     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1906     for (p = 0; p < numPoints; p++) {
1907       remotePoints[idx].index = remoteRoots[p];
1908       remotePoints[idx].rank = n;
1909       idx++;
1910     }
1911     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1912     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1913   }
1914   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
1915   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1916   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1917   PetscFunctionReturn(0);
1918 }
1919