xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 8ac45ccc23894227cb1ca8f664fb7d4a030bd561)
1 #include <petsc-private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
/* Class id for PetscPartitioner objects; assigned when the containing package is registered */
PetscClassId PETSCPARTITIONER_CLASSID = 0;

/* Registry of PetscPartitioner constructors, filled by PetscPartitionerRegister()/RegisterAll() */
PetscFunctionList PetscPartitionerList              = NULL;
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
7 
8 PetscBool ChacoPartitionercite = PETSC_FALSE;
9 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
10                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
11                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
12                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
13                                "  isbn      = {0-89791-816-9},\n"
14                                "  pages     = {28},\n"
15                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
16                                "  publisher = {ACM Press},\n"
17                                "  address   = {New York},\n"
18                                "  year      = {1995}\n}\n";
19 
PetscBool ParMetisPartitionercite = PETSC_FALSE;
/* BibTeX entry registered via PetscCitationsRegister() when the ParMetis partitioner is used */
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";
28 
29 #undef __FUNCT__
30 #define __FUNCT__ "DMPlexCreatePartitionerGraph"
31 /*@C
32   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
33 
34   Input Parameters:
35 + dm      - The mesh DM dm
36 - height  - Height of the strata from which to construct the graph
37 
38   Output Parameter:
39 + numVertices - Number of vertices in the graph
40 - offsets     - Point offsets in the graph
41 - adjacency   - Point connectivity in the graph
42 
43   The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
44   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
45   representation on the mesh.
46 
47   Level: developer
48 
49 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
50 @*/
51 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
52 {
53   PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
54   PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
55   IS             cellNumbering;
56   const PetscInt *cellNum;
57   PetscBool      useCone, useClosure;
58   PetscSection   section;
59   PetscSegBuffer adjBuffer;
60   PetscSF        sfPoint;
61   PetscMPIInt    rank;
62   PetscErrorCode ierr;
63 
64   PetscFunctionBegin;
65   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
66   ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
67   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
68   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
69   /* Build adjacency graph via a section/segbuffer */
70   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
71   ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
72   ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
73   /* Always use FVM adjacency to create partitioner graph */
74   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
75   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
76   ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
77   ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
78   if (nroots > 0) {
79     ierr = DMPlexGetCellNumbering(dm, &cellNumbering);CHKERRQ(ierr);
80     ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
81   }
82   for (*numVertices = 0, p = pStart; p < pEnd; p++) {
83     /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
84     if (nroots > 0) {if (cellNum[p] < 0) continue;}
85     adjSize = PETSC_DETERMINE;
86     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
87     for (a = 0; a < adjSize; ++a) {
88       const PetscInt point = adj[a];
89       if (point != p && pStart <= point && point < pEnd) {
90         PetscInt *PETSC_RESTRICT pBuf;
91         ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
92         ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
93         *pBuf = point;
94       }
95     }
96     (*numVertices)++;
97   }
98   ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
99   ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
100   /* Derive CSR graph from section/segbuffer */
101   ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
102   ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
103   ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
104   for (idx = 0, p = pStart; p < pEnd; p++) {
105     if (nroots > 0) {if (cellNum[p] < 0) continue;}
106     ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
107   }
108   vOffsets[*numVertices] = size;
109   if (offsets) *offsets = vOffsets;
110   ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
111   if (nroots > 0) {
112     ISLocalToGlobalMapping ltogCells;
113     PetscInt n, size, *cells_arr;
114     /* In parallel, apply a global cell numbering to the graph */
115     ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
116     ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
117     ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
118     ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
119     /* Convert to positive global cell numbers */
120     for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
121     ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
122     ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);CHKERRQ(ierr);
123     ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
124   }
125   if (adjacency) *adjacency = graph;
126   /* Clean up */
127   ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
128   ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
129   ierr = PetscFree(adj);CHKERRQ(ierr);
130   PetscFunctionReturn(0);
131 }
132 
133 #undef __FUNCT__
134 #define __FUNCT__ "DMPlexCreateNeighborCSR"
135 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
136 {
137   const PetscInt maxFaceCases = 30;
138   PetscInt       numFaceCases = 0;
139   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
140   PetscInt      *off, *adj;
141   PetscInt      *neighborCells = NULL;
142   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
143   PetscErrorCode ierr;
144 
145   PetscFunctionBegin;
146   /* For parallel partitioning, I think you have to communicate supports */
147   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
148   cellDim = dim - cellHeight;
149   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
150   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
151   if (cEnd - cStart == 0) {
152     if (numVertices) *numVertices = 0;
153     if (offsets)   *offsets   = NULL;
154     if (adjacency) *adjacency = NULL;
155     PetscFunctionReturn(0);
156   }
157   numCells  = cEnd - cStart;
158   faceDepth = depth - cellHeight;
159   if (dim == depth) {
160     PetscInt f, fStart, fEnd;
161 
162     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
163     /* Count neighboring cells */
164     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
165     for (f = fStart; f < fEnd; ++f) {
166       const PetscInt *support;
167       PetscInt        supportSize;
168       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
169       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
170       if (supportSize == 2) {
171         PetscInt numChildren;
172 
173         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
174         if (!numChildren) {
175           ++off[support[0]-cStart+1];
176           ++off[support[1]-cStart+1];
177         }
178       }
179     }
180     /* Prefix sum */
181     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
182     if (adjacency) {
183       PetscInt *tmp;
184 
185       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
186       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
187       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
188       /* Get neighboring cells */
189       for (f = fStart; f < fEnd; ++f) {
190         const PetscInt *support;
191         PetscInt        supportSize;
192         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
193         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
194         if (supportSize == 2) {
195           PetscInt numChildren;
196 
197           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
198           if (!numChildren) {
199             adj[tmp[support[0]-cStart]++] = support[1];
200             adj[tmp[support[1]-cStart]++] = support[0];
201           }
202         }
203       }
204 #if defined(PETSC_USE_DEBUG)
205       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
206 #endif
207       ierr = PetscFree(tmp);CHKERRQ(ierr);
208     }
209     if (numVertices) *numVertices = numCells;
210     if (offsets)   *offsets   = off;
211     if (adjacency) *adjacency = adj;
212     PetscFunctionReturn(0);
213   }
214   /* Setup face recognition */
215   if (faceDepth == 1) {
216     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
217 
218     for (c = cStart; c < cEnd; ++c) {
219       PetscInt corners;
220 
221       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
222       if (!cornersSeen[corners]) {
223         PetscInt nFV;
224 
225         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
226         cornersSeen[corners] = 1;
227 
228         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
229 
230         numFaceVertices[numFaceCases++] = nFV;
231       }
232     }
233   }
234   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
235   /* Count neighboring cells */
236   for (cell = cStart; cell < cEnd; ++cell) {
237     PetscInt numNeighbors = PETSC_DETERMINE, n;
238 
239     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
240     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
241     for (n = 0; n < numNeighbors; ++n) {
242       PetscInt        cellPair[2];
243       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
244       PetscInt        meetSize = 0;
245       const PetscInt *meet    = NULL;
246 
247       cellPair[0] = cell; cellPair[1] = neighborCells[n];
248       if (cellPair[0] == cellPair[1]) continue;
249       if (!found) {
250         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
251         if (meetSize) {
252           PetscInt f;
253 
254           for (f = 0; f < numFaceCases; ++f) {
255             if (numFaceVertices[f] == meetSize) {
256               found = PETSC_TRUE;
257               break;
258             }
259           }
260         }
261         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
262       }
263       if (found) ++off[cell-cStart+1];
264     }
265   }
266   /* Prefix sum */
267   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
268 
269   if (adjacency) {
270     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
271     /* Get neighboring cells */
272     for (cell = cStart; cell < cEnd; ++cell) {
273       PetscInt numNeighbors = PETSC_DETERMINE, n;
274       PetscInt cellOffset   = 0;
275 
276       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
277       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
278       for (n = 0; n < numNeighbors; ++n) {
279         PetscInt        cellPair[2];
280         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
281         PetscInt        meetSize = 0;
282         const PetscInt *meet    = NULL;
283 
284         cellPair[0] = cell; cellPair[1] = neighborCells[n];
285         if (cellPair[0] == cellPair[1]) continue;
286         if (!found) {
287           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
288           if (meetSize) {
289             PetscInt f;
290 
291             for (f = 0; f < numFaceCases; ++f) {
292               if (numFaceVertices[f] == meetSize) {
293                 found = PETSC_TRUE;
294                 break;
295               }
296             }
297           }
298           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
299         }
300         if (found) {
301           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
302           ++cellOffset;
303         }
304       }
305     }
306   }
307   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
308   if (numVertices) *numVertices = numCells;
309   if (offsets)   *offsets   = off;
310   if (adjacency) *adjacency = adj;
311   PetscFunctionReturn(0);
312 }
313 
314 #undef __FUNCT__
315 #define __FUNCT__ "PetscPartitionerRegister"
316 /*@C
317   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
318 
319   Not Collective
320 
321   Input Parameters:
322 + name        - The name of a new user-defined creation routine
323 - create_func - The creation routine itself
324 
325   Notes:
326   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
327 
328   Sample usage:
329 .vb
330     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
331 .ve
332 
333   Then, your PetscPartitioner type can be chosen with the procedural interface via
334 .vb
335     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
336     PetscPartitionerSetType(PetscPartitioner, "my_part");
337 .ve
338    or at runtime via the option
339 .vb
340     -petscpartitioner_type my_part
341 .ve
342 
343   Level: advanced
344 
345 .keywords: PetscPartitioner, register
346 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
347 
348 @*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Add the creation routine to the global registry consulted by PetscPartitionerSetType() */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
357 
358 #undef __FUNCT__
359 #define __FUNCT__ "PetscPartitionerSetType"
360 /*@C
361   PetscPartitionerSetType - Builds a particular PetscPartitioner
362 
363   Collective on PetscPartitioner
364 
365   Input Parameters:
366 + part - The PetscPartitioner object
367 - name - The kind of partitioner
368 
369   Options Database Key:
370 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
371 
372   Level: intermediate
373 
374 .keywords: PetscPartitioner, set, type
375 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
376 @*/
377 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
378 {
379   PetscErrorCode (*r)(PetscPartitioner);
380   PetscBool      match;
381   PetscErrorCode ierr;
382 
383   PetscFunctionBegin;
384   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
385   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
386   if (match) PetscFunctionReturn(0);
387 
388   if (!PetscPartitionerRegisterAllCalled) {ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);}
389   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
390   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
391 
392   if (part->ops->destroy) {
393     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
394     part->ops->destroy = NULL;
395   }
396   ierr = (*r)(part);CHKERRQ(ierr);
397   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
398   PetscFunctionReturn(0);
399 }
400 
401 #undef __FUNCT__
402 #define __FUNCT__ "PetscPartitionerGetType"
403 /*@C
404   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
405 
406   Not Collective
407 
408   Input Parameter:
409 . part - The PetscPartitioner
410 
411   Output Parameter:
412 . name - The PetscPartitioner type name
413 
414   Level: intermediate
415 
416 .keywords: PetscPartitioner, get, type, name
417 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
418 @*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  /* Ensure built-in types are registered so the returned name is meaningful for lookups */
  if (!PetscPartitionerRegisterAllCalled) {ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);}
  /* Returns a borrowed pointer to the object's type name; do not free */
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
430 
431 #undef __FUNCT__
432 #define __FUNCT__ "PetscPartitionerView"
433 /*@C
434   PetscPartitionerView - Views a PetscPartitioner
435 
436   Collective on PetscPartitioner
437 
438   Input Parameter:
439 + part - the PetscPartitioner object to view
440 - v    - the viewer
441 
442   Level: developer
443 
444 .seealso: PetscPartitionerDestroy()
445 @*/
PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Default to the communicator's stdout ASCII viewer when none is given */
  if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
  /* Delegate to the implementation's view routine, if any */
  if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
456 
457 #undef __FUNCT__
458 #define __FUNCT__ "PetscPartitionerViewFromOptions"
459 /*
460   PetscPartitionerViewFromOptions - Processes command line options to determine if/how a PetscPartitioner is to be viewed.
461 
462   Collective on PetscPartitioner
463 
464   Input Parameters:
465 + part   - the PetscPartitioner
466 . prefix - prefix to use for viewing, or NULL
467 - optionname - option to activate viewing
468 
469   Level: intermediate
470 
471 .keywords: PetscPartitioner, view, options, database
472 .seealso: VecViewFromOptions(), MatViewFromOptions()
473 */
474 PetscErrorCode PetscPartitionerViewFromOptions(PetscPartitioner part, const char prefix[], const char optionname[])
475 {
476   PetscViewer       viewer;
477   PetscViewerFormat format;
478   PetscBool         flg;
479   PetscErrorCode    ierr;
480 
481   PetscFunctionBegin;
482   if (prefix) {ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject) part), prefix,                       optionname, &viewer, &format, &flg);CHKERRQ(ierr);}
483   else        {ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject) part), ((PetscObject) part)->prefix, optionname, &viewer, &format, &flg);CHKERRQ(ierr);}
484   if (flg) {
485     ierr = PetscViewerPushFormat(viewer, format);CHKERRQ(ierr);
486     ierr = PetscPartitionerView(part, viewer);CHKERRQ(ierr);
487     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
488     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
489   }
490   PetscFunctionReturn(0);
491 }
492 #undef __FUNCT__
493 #define __FUNCT__ "PetscPartitionerSetTypeFromOptions_Internal"
494 PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
495 {
496   const char    *defaultType;
497   char           name[256];
498   PetscBool      flg;
499   PetscErrorCode ierr;
500 
501   PetscFunctionBegin;
502   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
503   if (!((PetscObject) part)->type_name) defaultType = PETSCPARTITIONERCHACO;
504   else                                  defaultType = ((PetscObject) part)->type_name;
505   if (!PetscPartitionerRegisterAllCalled) {ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);}
506 
507   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
508   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);CHKERRQ(ierr);
509   if (flg) {
510     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
511   } else if (!((PetscObject) part)->type_name) {
512     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
513   }
514   ierr = PetscOptionsEnd();CHKERRQ(ierr);
515   PetscFunctionReturn(0);
516 }
517 
518 #undef __FUNCT__
519 #define __FUNCT__ "PetscPartitionerSetFromOptions"
520 /*@
521   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
522 
523   Collective on PetscPartitioner
524 
525   Input Parameter:
526 . part - the PetscPartitioner object to set options for
527 
528   Level: developer
529 
530 .seealso: PetscPartitionerView()
531 @*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Choose the type first so the implementation's option handler below exists */
  ierr = PetscPartitionerSetTypeFromOptions_Internal(part);CHKERRQ(ierr);

  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  if (part->ops->setfromoptions) {ierr = (*part->ops->setfromoptions)(part);CHKERRQ(ierr);}
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers((PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  /* Honor -petscpartitioner_view if given */
  ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
548 
549 #undef __FUNCT__
550 #define __FUNCT__ "PetscPartitionerSetUp"
551 /*@C
552   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
553 
554   Collective on PetscPartitioner
555 
556   Input Parameter:
557 . part - the PetscPartitioner object to setup
558 
559   Level: developer
560 
561 .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
562 @*/
PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Delegate to the implementation; a no-op for types without a setup routine */
  if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
572 
573 #undef __FUNCT__
574 #define __FUNCT__ "PetscPartitionerDestroy"
575 /*@
576   PetscPartitionerDestroy - Destroys a PetscPartitioner object
577 
578   Collective on PetscPartitioner
579 
580   Input Parameter:
581 . part - the PetscPartitioner object to destroy
582 
583   Level: developer
584 
585 .seealso: PetscPartitionerView()
586 @*/
PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*part) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);

  /* Drop one reference; only the last reference actually destroys the object */
  if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
  ((PetscObject) (*part))->refct = 0;

  /* Let the implementation free its private data before the header goes away */
  if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
  ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
602 
603 #undef __FUNCT__
604 #define __FUNCT__ "PetscPartitionerCreate"
605 /*@
606   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
607 
608   Collective on MPI_Comm
609 
610   Input Parameter:
611 . comm - The communicator for the PetscPartitioner object
612 
613   Output Parameter:
614 . part - The PetscPartitioner object
615 
616   Level: beginner
617 
618 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL
619 @*/
PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
{
  PetscPartitioner p;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidPointer(part, 2);
  *part = NULL;
  /* NOTE(review): initializes the FV package here, presumably because it registers
     PETSCPARTITIONER_CLASSID and the built-in partitioner types -- confirm */
  ierr = PetscFVInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(p, _p_PetscPartitioner, struct _PetscPartitionerOps, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
  /* Start with an empty ops table; PetscPartitionerSetType() fills it in */
  ierr = PetscMemzero(p->ops, sizeof(struct _PetscPartitionerOps));CHKERRQ(ierr);

  *part = p;
  PetscFunctionReturn(0);
}
636 
637 #undef __FUNCT__
638 #define __FUNCT__ "PetscPartitionerPartition"
639 /*@
640   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
641 
642   Collective on DM
643 
644   Input Parameters:
645 + part    - The PetscPartitioner
646 - dm      - The mesh DM
647 
648   Output Parameters:
649 + partSection     - The PetscSection giving the division of points by partition
650 - partition       - The list of points by partition
651 
652   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
653 
654   Level: developer
655 
656 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
657 @*/
658 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
659 {
660   PetscMPIInt    size;
661   PetscErrorCode ierr;
662 
663   PetscFunctionBegin;
664   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
665   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
666   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
667   PetscValidPointer(partition, 5);
668   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
669   if (size == 1) {
670     PetscInt *points;
671     PetscInt  cStart, cEnd, c;
672 
673     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
674     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
675     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
676     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
677     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
678     for (c = cStart; c < cEnd; ++c) points[c] = c;
679     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
680     PetscFunctionReturn(0);
681   }
682   if (part->height == 0) {
683     PetscInt  numVertices;
684     PetscInt *start     = NULL;
685     PetscInt *adjacency = NULL;
686 
687     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency);CHKERRQ(ierr);
688     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
689     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
690     ierr = PetscFree(start);CHKERRQ(ierr);
691     ierr = PetscFree(adjacency);CHKERRQ(ierr);
692   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
693   PetscFunctionReturn(0);
694 }
695 
696 #undef __FUNCT__
697 #define __FUNCT__ "PetscPartitionerDestroy_Shell"
PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  /* Release the user-provided partition data, then the private struct itself */
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
709 
710 #undef __FUNCT__
711 #define __FUNCT__ "PetscPartitionerView_Shell_Ascii"
712 PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
713 {
714   PetscViewerFormat format;
715   PetscErrorCode    ierr;
716 
717   PetscFunctionBegin;
718   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
719   ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
720   PetscFunctionReturn(0);
721 }
722 
723 #undef __FUNCT__
724 #define __FUNCT__ "PetscPartitionerView_Shell"
725 PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
726 {
727   PetscBool      iascii;
728   PetscErrorCode ierr;
729 
730   PetscFunctionBegin;
731   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
732   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
733   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
734   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
735   PetscFunctionReturn(0);
736 }
737 
738 #undef __FUNCT__
739 #define __FUNCT__ "PetscPartitionerPartition_Shell"
740 PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
741 {
742   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
743   PetscInt                np;
744   PetscErrorCode          ierr;
745 
746   PetscFunctionBegin;
747   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
748   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
749   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
750   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
751   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
752   *partition = p->partition;
753   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
754   PetscFunctionReturn(0);
755 }
756 
757 #undef __FUNCT__
758 #define __FUNCT__ "PetscPartitionerInitialize_Shell"
PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
{
  PetscFunctionBegin;
  /* Install the shell implementation's virtual methods */
  part->ops->view      = PetscPartitionerView_Shell;
  part->ops->destroy   = PetscPartitionerDestroy_Shell;
  part->ops->partition = PetscPartitionerPartition_Shell;
  PetscFunctionReturn(0);
}
767 
768 /*MC
769   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
770 
771   Level: intermediate
772 
773 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
774 M*/
775 
776 #undef __FUNCT__
777 #define __FUNCT__ "PetscPartitionerCreate_Shell"
778 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
779 {
780   PetscPartitioner_Shell *p;
781   PetscErrorCode          ierr;
782 
783   PetscFunctionBegin;
784   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
785   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
786   part->data = p;
787 
788   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
789   PetscFunctionReturn(0);
790 }
791 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerShellSetPartition"
/*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh

  Collective on PART

  Input Parameters:
+ part     - The PetscPartitioner
. numProcs - The number of partitions
. sizes    - array of size numProcs (or NULL) providing the number of points in each partition
- points   - array of size sum(sizes) (may be NULL iff sizes is NULL) providing the partition each point belongs to

  Level: developer

  Notes:

    It is safe to free the sizes and points arrays after use in this routine.

.seealso DMPlexDistribute(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt numProcs, const PetscInt sizes[], const PetscInt points[])
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscInt                proc, numPoints;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  if (sizes) {PetscValidPointer(sizes, 3);}
  /* points must be supplied whenever sizes is (see parameter docs above) */
  if (sizes) {PetscValidPointer(points, 4);}
  /* Discard any previously configured partition before building the new one */
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(p->section, 0, numProcs);CHKERRQ(ierr);
  if (sizes) {
    for (proc = 0; proc < numProcs; ++proc) {
      ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
    }
  }
  ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
  /* Storage size equals sum(sizes); the points array is copied, so the caller may free it */
  ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
837 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerDestroy_Chaco"
/* Release the Chaco-specific context attached to the partitioner */
PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
{
  PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
849 
850 #undef __FUNCT__
851 #define __FUNCT__ "PetscPartitionerView_Chaco_Ascii"
852 PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
853 {
854   PetscViewerFormat format;
855   PetscErrorCode    ierr;
856 
857   PetscFunctionBegin;
858   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
859   ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
860   PetscFunctionReturn(0);
861 }
862 
863 #undef __FUNCT__
864 #define __FUNCT__ "PetscPartitionerView_Chaco"
865 PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
866 {
867   PetscBool      iascii;
868   PetscErrorCode ierr;
869 
870   PetscFunctionBegin;
871   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
872   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
873   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
874   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
875   PetscFunctionReturn(0);
876 }
877 
878 #if defined(PETSC_HAVE_CHACO)
879 #if defined(PETSC_HAVE_UNISTD_H)
880 #include <unistd.h>
881 #endif
882 /* Chaco does not have an include file */
883 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
884                        float *ewgts, float *x, float *y, float *z, char *outassignname,
885                        char *outfilename, short *assignment, int architecture, int ndims_tot,
886                        int mesh_dims[3], double *goal, int global_method, int local_method,
887                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
888 
889 extern int FREE_GRAPH;
890 #endif
891 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerPartition_Chaco"
/* Partition the serial CSR graph (start/adjacency) into nparts pieces with the
   Chaco library, returning a PetscSection of partition sizes and an IS listing
   the vertices of each partition in sequence. Chaco's stdout chatter is
   captured through a pipe and only reported if the library signals an error. */
PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
  short int     *assignment;              /* Output partition */
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  if (!numVertices) {
    /* Empty local graph: produce an empty partition and return early */
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Shift the adjacency list to 1-based numbering (apparently what Chaco
     expects); the shift is undone before returning */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  /* NOTE(review): interface() returns a nonzero code on failure, but ierr is
     only inspected below when PETSC_HAVE_UNISTD_H is defined; without unistd.h
     a Chaco error would be silently ignored -- confirm this is intended */
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    /* Restore stdout and drain the pipe so Chaco's output can be attached to
       the error message if interface() failed */
    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  /* List vertices grouped by partition; ownership of points passes to the IS */
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Restore the caller's 0-based adjacency numbering */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
997 
998 #undef __FUNCT__
999 #define __FUNCT__ "PetscPartitionerInitialize_Chaco"
1000 PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1001 {
1002   PetscFunctionBegin;
1003   part->ops->view      = PetscPartitionerView_Chaco;
1004   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1005   part->ops->partition = PetscPartitionerPartition_Chaco;
1006   PetscFunctionReturn(0);
1007 }
1008 
1009 /*MC
1010   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1011 
1012   Level: intermediate
1013 
1014 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1015 M*/
1016 
1017 #undef __FUNCT__
1018 #define __FUNCT__ "PetscPartitionerCreate_Chaco"
1019 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1020 {
1021   PetscPartitioner_Chaco *p;
1022   PetscErrorCode          ierr;
1023 
1024   PetscFunctionBegin;
1025   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1026   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1027   part->data = p;
1028 
1029   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1030   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1031   PetscFunctionReturn(0);
1032 }
1033 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerDestroy_ParMetis"
/* Release the ParMetis-specific context attached to the partitioner */
PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
{
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
  PetscErrorCode             ierr;

  PetscFunctionBegin;
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1045 
1046 #undef __FUNCT__
1047 #define __FUNCT__ "PetscPartitionerView_ParMetis_Ascii"
1048 PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1049 {
1050   PetscViewerFormat format;
1051   PetscErrorCode    ierr;
1052 
1053   PetscFunctionBegin;
1054   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1055   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
1056   PetscFunctionReturn(0);
1057 }
1058 
1059 #undef __FUNCT__
1060 #define __FUNCT__ "PetscPartitionerView_ParMetis"
1061 PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1062 {
1063   PetscBool      iascii;
1064   PetscErrorCode ierr;
1065 
1066   PetscFunctionBegin;
1067   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1068   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1069   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1070   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1071   PetscFunctionReturn(0);
1072 }
1073 
1074 #if defined(PETSC_HAVE_PARMETIS)
1075 #include <parmetis.h>
1076 #endif
1077 
1078 #undef __FUNCT__
1079 #define __FUNCT__ "PetscPartitionerPartition_ParMetis"
1080 PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1081 {
1082 #if defined(PETSC_HAVE_PARMETIS)
1083   MPI_Comm       comm;
1084   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1085   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1086   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1087   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1088   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1089   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1090   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1091   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1092   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1093   PetscReal     *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1094   PetscReal     *ubvec;                    /* The balance intolerance for vertex weights */
1095   PetscInt       options[5];               /* Options */
1096   /* Outputs */
1097   PetscInt       edgeCut;                  /* The number of edges cut by the partition */
1098   PetscInt      *assignment, *points;
1099   PetscMPIInt    rank, p, v, i;
1100   PetscErrorCode ierr;
1101 
1102   PetscFunctionBegin;
1103   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1104   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1105   options[0] = 0; /* Use all defaults */
1106   /* Calculate vertex distribution */
1107   ierr = PetscMalloc4(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment);CHKERRQ(ierr);
1108   vtxdist[0] = 0;
1109   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1110   for (p = 2; p <= nparts; ++p) {
1111     vtxdist[p] += vtxdist[p-1];
1112   }
1113   /* Calculate weights */
1114   for (p = 0; p < nparts; ++p) {
1115     tpwgts[p] = 1.0/nparts;
1116   }
1117   ubvec[0] = 1.05;
1118 
1119   if (nparts == 1) {
1120     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));
1121   } else {
1122     if (vtxdist[1] == vtxdist[nparts]) {
1123       if (!rank) {
1124         PetscStackPush("METIS_PartGraphKway");
1125         ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
1126         PetscStackPop;
1127         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1128       }
1129     } else {
1130       PetscStackPush("ParMETIS_V3_PartKway");
1131       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1132       PetscStackPop;
1133       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
1134     }
1135   }
1136   /* Convert to PetscSection+IS */
1137   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1138   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1139   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1140   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1141   for (p = 0, i = 0; p < nparts; ++p) {
1142     for (v = 0; v < nvtxs; ++v) {
1143       if (assignment[v] == p) points[i++] = v;
1144     }
1145   }
1146   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1147   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1148   ierr = PetscFree4(vtxdist,tpwgts,ubvec,assignment);CHKERRQ(ierr);
1149 #else
1150   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1151 #endif
1152   PetscFunctionReturn(0);
1153 }
1154 
1155 #undef __FUNCT__
1156 #define __FUNCT__ "PetscPartitionerInitialize_ParMetis"
1157 PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1158 {
1159   PetscFunctionBegin;
1160   part->ops->view      = PetscPartitionerView_ParMetis;
1161   part->ops->destroy   = PetscPartitionerDestroy_ParMetis;
1162   part->ops->partition = PetscPartitionerPartition_ParMetis;
1163   PetscFunctionReturn(0);
1164 }
1165 
1166 /*MC
1167   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1168 
1169   Level: intermediate
1170 
1171 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1172 M*/
1173 
1174 #undef __FUNCT__
1175 #define __FUNCT__ "PetscPartitionerCreate_ParMetis"
1176 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1177 {
1178   PetscPartitioner_ParMetis *p;
1179   PetscErrorCode          ierr;
1180 
1181   PetscFunctionBegin;
1182   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1183   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1184   part->data = p;
1185 
1186   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1187   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1188   PetscFunctionReturn(0);
1189 }
1190 
1191 #undef __FUNCT__
1192 #define __FUNCT__ "DMPlexGetPartitioner"
1193 /*@
1194   DMPlexGetPartitioner - Get the mesh partitioner
1195 
1196   Not collective
1197 
1198   Input Parameter:
1199 . dm - The DM
1200 
1201   Output Parameter:
1202 . part - The PetscPartitioner
1203 
1204   Level: developer
1205 
1206   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1207 
1208 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1209 @*/
1210 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1211 {
1212   DM_Plex *mesh = (DM_Plex *) dm->data;
1213 
1214   PetscFunctionBegin;
1215   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1216   PetscValidPointer(part, 2);
1217   *part = mesh->partitioner;
1218   PetscFunctionReturn(0);
1219 }
1220 
1221 #undef __FUNCT__
1222 #define __FUNCT__ "DMPlexSetPartitioner"
1223 /*@
1224   DMPlexSetPartitioner - Set the mesh partitioner
1225 
1226   logically collective on dm and part
1227 
1228   Input Parameters:
1229 + dm - The DM
1230 - part - The partitioner
1231 
1232   Level: developer
1233 
1234   Note: Any existing PetscPartitioner will be destroyed.
1235 
1236 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1237 @*/
1238 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1239 {
1240   DM_Plex       *mesh = (DM_Plex *) dm->data;
1241   PetscErrorCode ierr;
1242 
1243   PetscFunctionBegin;
1244   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1245   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1246   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1247   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1248   mesh->partitioner = part;
1249   PetscFunctionReturn(0);
1250 }
1251 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelClosure"
/*@
  DMPlexPartitionLabelClosure - Add the closure of all points to the partition label

  Input Parameters:
+ dm     - The DM
- label  - DMLabel assigning ranks to remote roots

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, c, closureSize;
  PetscInt       *closure = NULL;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];

    /* For every point already labeled with this rank, label its entire
       transitive closure with the same rank */
    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      /* closure is indexed in stride-2 (point, orientation) pairs; take points only */
      for (c = 0; c < closureSize*2; c += 2) {ierr = DMLabelSetValue(label, closure[c], rank);CHKERRQ(ierr);}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  /* The closure work array is reused across iterations; return it once at the end */
  if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);}
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1295 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelAdjacency"
/*@
  DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label

  Input Parameters:
+ dm     - The DM
- label  - DMLabel assigning ranks to remote roots

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, a, adjSize;
  PetscInt       *adj = NULL;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];

    /* For every point labeled with this rank, also label its adjacent points */
    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* PETSC_DETERMINE lets DMPlexGetAdjacency size (and reuse) the adj array */
      adjSize = PETSC_DETERMINE;
      ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
      for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  ierr = PetscFree(adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1340 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelInvert"
/*@
  DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label

  Input Parameters:
+ dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots

  Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
  resulting leafLabel is a receiver mapping of remote roots to their parent rank.

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, numProcs;
  PetscInt           p, n, numNeighbors, size, l, nleaves;
  PetscSF            sfPoint;
  PetscSFNode       *rootPoints, *leafPoints;
  PetscSection       rootSection, leafSection;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners */
  /* rootSection counts, per target rank, how many points this process sends */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, numProcs);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(size, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* If the point is a leaf in the point SF, record its actual owner's
         (rank, index); otherwise this process owns it */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Filter remote contributions (ovLeafPoints) into the overlapSF */
  ierr = PetscSectionGetStorageSize(leafSection, &size);CHKERRQ(ierr);
  for (p = 0; p < size; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1430 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelCreateSF"
/*@
  DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points

  Input Parameters:
+ dm    - The DM
- label - DMLabel assigning ranks to remote roots

  Output Parameter:
. sf    - The star forest communication context encapsulating the defined mapping

  Note: The incoming label is a receiver mapping of remote points to their parent rank.

  Level: developer

.seealso: DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
{
  PetscMPIInt     rank, numProcs;
  PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
  PetscSFNode    *remotePoints;
  IS              remoteRootIS;
  const PetscInt *remoteRoots;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);

  /* Count the total number of labeled points, summed over all ranks in the label */
  for (numRemote = 0, n = 0; n < numProcs; ++n) {
    ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
    numRemote += numPoints;
  }
  ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
  /* Put owned points first */
  ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
  if (numPoints > 0) {
    ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
    ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    for (p = 0; p < numPoints; p++) {
      remotePoints[idx].index = remoteRoots[p];
      remotePoints[idx].rank = rank;
      idx++;
    }
    ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
  }
  /* Now add remote points */
  for (n = 0; n < numProcs; ++n) {
    ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
    if (numPoints <= 0 || n == rank) continue; /* own stratum was handled above */
    ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
    ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    for (p = 0; p < numPoints; p++) {
      remotePoints[idx].index = remoteRoots[p];
      remotePoints[idx].rank = n;
      idx++;
    }
    ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
  }
  ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  /* NULL ilocal means leaves are numbered contiguously 0..numRemote-1;
     ownership of remotePoints passes to the SF */
  ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1499