xref: /petsc/src/dm/impls/plex/plexpartition.c (revision bfc4c25e64040512a7cdc7bcb995f785926f51e6)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
3 PetscClassId PETSCPARTITIONER_CLASSID = 0;
4 
5 PetscFunctionList PetscPartitionerList              = NULL;
6 PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
7 
8 PetscBool ChacoPartitionercite = PETSC_FALSE;
/* BibTeX citation for the Chaco partitioner; every physical line must end in
   "\n" so the registered citation prints as well-formed BibTeX (the booktitle
   line was previously missing its newline). */
const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
                               "  author    = {Bruce Hendrickson and Robert Leland},\n"
                               "  title     = {A multilevel algorithm for partitioning graphs},\n"
                               "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},\n"
                               "  isbn      = {0-89791-816-9},\n"
                               "  pages     = {28},\n"
                               "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
                               "  publisher = {ACM Press},\n"
                               "  address   = {New York},\n"
                               "  year      = {1995}\n}\n";
19 
20 PetscBool ParMetisPartitionercite = PETSC_FALSE;
21 const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
22                                "  author  = {George Karypis and Vipin Kumar},\n"
23                                "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
24                                "  journal = {Journal of Parallel and Distributed Computing},\n"
25                                "  volume  = {48},\n"
26                                "  pages   = {71--85},\n"
27                                "  year    = {1998}\n}\n";
28 
29 #undef __FUNCT__
30 #define __FUNCT__ "DMPlexCreatePartitionerGraph"
31 /*@C
32   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
33 
  Input Parameters:
+ dm      - The mesh DM
- height  - Height of the strata from which to construct the graph

  Output Parameters:
+ numVertices     - Number of vertices in the graph
. offsets         - Point offsets in the graph
. adjacency       - Point connectivity in the graph
- globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.
43 
44   The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
45   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
46   representation on the mesh.
47 
48   Level: developer
49 
50 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
51 @*/
PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
{
  PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
  PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
  IS             cellNumbering;
  const PetscInt *cellNum;
  PetscBool      useCone, useClosure;
  PetscSection   section;
  PetscSegBuffer adjBuffer;
  PetscSF        sfPoint;
  PetscMPIInt    rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* NOTE(review): rank is fetched here but never used in this function */
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
  /* nroots > 0 means a point SF exists (parallel run), so ghost cells must be skipped below */
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
  /* Build adjacency graph via a section/segbuffer: the section counts neighbors
     per point, the segbuffer accumulates the neighbor lists */
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
  /* Always use FVM adjacency to create partitioner graph: save the user's
     adjacency settings, force cone/no-closure, and restore them after the loop */
  ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
  ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);CHKERRQ(ierr);
  if (globalNumbering) {
    /* Hand the caller its own reference; ours is destroyed at the end */
    ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
    *globalNumbering = cellNumbering;
  }
  ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  for (*numVertices = 0, p = pStart; p < pEnd; p++) {
    /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    adjSize = PETSC_DETERMINE;
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      /* Keep only neighbors in the same stratum, excluding the point itself */
      const PetscInt point = adj[a];
      if (point != p && pStart <= point && point < pEnd) {
        PetscInt *PETSC_RESTRICT pBuf;
        ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
        ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
        *pBuf = point;
      }
    }
    (*numVertices)++;
  }
  ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
  /* Derive CSR graph from section/segbuffer */
  ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
  /* vOffsets is indexed by owned-cell order (idx), not by point number */
  for (idx = 0, p = pStart; p < pEnd; p++) {
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
  }
  vOffsets[*numVertices] = size;
  if (offsets) *offsets = vOffsets;
  ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
  {
    ISLocalToGlobalMapping ltogCells;
    PetscInt n, size, *cells_arr; /* NOTE(review): this 'size' shadows the outer 'size' */
    /* In parallel, apply a global cell numbering to the graph */
    ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
    /* Convert to positive global cell numbers (negative entries mark non-owned cells) */
    for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
    ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
    ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
  }
  if (adjacency) *adjacency = graph;
  /* Clean up */
  ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
  ierr = PetscFree(adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
136 
137 #undef __FUNCT__
138 #define __FUNCT__ "DMPlexCreateNeighborCSR"
139 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
140 {
141   const PetscInt maxFaceCases = 30;
142   PetscInt       numFaceCases = 0;
143   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
144   PetscInt      *off, *adj;
145   PetscInt      *neighborCells = NULL;
146   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
147   PetscErrorCode ierr;
148 
149   PetscFunctionBegin;
150   /* For parallel partitioning, I think you have to communicate supports */
151   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
152   cellDim = dim - cellHeight;
153   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
154   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
155   if (cEnd - cStart == 0) {
156     if (numVertices) *numVertices = 0;
157     if (offsets)   *offsets   = NULL;
158     if (adjacency) *adjacency = NULL;
159     PetscFunctionReturn(0);
160   }
161   numCells  = cEnd - cStart;
162   faceDepth = depth - cellHeight;
163   if (dim == depth) {
164     PetscInt f, fStart, fEnd;
165 
166     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
167     /* Count neighboring cells */
168     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
169     for (f = fStart; f < fEnd; ++f) {
170       const PetscInt *support;
171       PetscInt        supportSize;
172       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
173       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
174       if (supportSize == 2) {
175         PetscInt numChildren;
176 
177         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
178         if (!numChildren) {
179           ++off[support[0]-cStart+1];
180           ++off[support[1]-cStart+1];
181         }
182       }
183     }
184     /* Prefix sum */
185     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
186     if (adjacency) {
187       PetscInt *tmp;
188 
189       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
190       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
191       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
192       /* Get neighboring cells */
193       for (f = fStart; f < fEnd; ++f) {
194         const PetscInt *support;
195         PetscInt        supportSize;
196         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
197         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
198         if (supportSize == 2) {
199           PetscInt numChildren;
200 
201           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
202           if (!numChildren) {
203             adj[tmp[support[0]-cStart]++] = support[1];
204             adj[tmp[support[1]-cStart]++] = support[0];
205           }
206         }
207       }
208 #if defined(PETSC_USE_DEBUG)
209       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
210 #endif
211       ierr = PetscFree(tmp);CHKERRQ(ierr);
212     }
213     if (numVertices) *numVertices = numCells;
214     if (offsets)   *offsets   = off;
215     if (adjacency) *adjacency = adj;
216     PetscFunctionReturn(0);
217   }
218   /* Setup face recognition */
219   if (faceDepth == 1) {
220     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
221 
222     for (c = cStart; c < cEnd; ++c) {
223       PetscInt corners;
224 
225       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
226       if (!cornersSeen[corners]) {
227         PetscInt nFV;
228 
229         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
230         cornersSeen[corners] = 1;
231 
232         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
233 
234         numFaceVertices[numFaceCases++] = nFV;
235       }
236     }
237   }
238   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
239   /* Count neighboring cells */
240   for (cell = cStart; cell < cEnd; ++cell) {
241     PetscInt numNeighbors = PETSC_DETERMINE, n;
242 
243     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
244     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
245     for (n = 0; n < numNeighbors; ++n) {
246       PetscInt        cellPair[2];
247       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
248       PetscInt        meetSize = 0;
249       const PetscInt *meet    = NULL;
250 
251       cellPair[0] = cell; cellPair[1] = neighborCells[n];
252       if (cellPair[0] == cellPair[1]) continue;
253       if (!found) {
254         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
255         if (meetSize) {
256           PetscInt f;
257 
258           for (f = 0; f < numFaceCases; ++f) {
259             if (numFaceVertices[f] == meetSize) {
260               found = PETSC_TRUE;
261               break;
262             }
263           }
264         }
265         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
266       }
267       if (found) ++off[cell-cStart+1];
268     }
269   }
270   /* Prefix sum */
271   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
272 
273   if (adjacency) {
274     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
275     /* Get neighboring cells */
276     for (cell = cStart; cell < cEnd; ++cell) {
277       PetscInt numNeighbors = PETSC_DETERMINE, n;
278       PetscInt cellOffset   = 0;
279 
280       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
281       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
282       for (n = 0; n < numNeighbors; ++n) {
283         PetscInt        cellPair[2];
284         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
285         PetscInt        meetSize = 0;
286         const PetscInt *meet    = NULL;
287 
288         cellPair[0] = cell; cellPair[1] = neighborCells[n];
289         if (cellPair[0] == cellPair[1]) continue;
290         if (!found) {
291           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
292           if (meetSize) {
293             PetscInt f;
294 
295             for (f = 0; f < numFaceCases; ++f) {
296               if (numFaceVertices[f] == meetSize) {
297                 found = PETSC_TRUE;
298                 break;
299               }
300             }
301           }
302           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
303         }
304         if (found) {
305           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
306           ++cellOffset;
307         }
308       }
309     }
310   }
311   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
312   if (numVertices) *numVertices = numCells;
313   if (offsets)   *offsets   = off;
314   if (adjacency) *adjacency = adj;
315   PetscFunctionReturn(0);
316 }
317 
318 #undef __FUNCT__
319 #define __FUNCT__ "PetscPartitionerRegister"
320 /*@C
321   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
322 
323   Not Collective
324 
325   Input Parameters:
326 + name        - The name of a new user-defined creation routine
327 - create_func - The creation routine itself
328 
329   Notes:
330   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
331 
332   Sample usage:
333 .vb
334     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
335 .ve
336 
337   Then, your PetscPartitioner type can be chosen with the procedural interface via
338 .vb
339     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
340     PetscPartitionerSetType(PetscPartitioner, "my_part");
341 .ve
342    or at runtime via the option
343 .vb
344     -petscpartitioner_type my_part
345 .ve
346 
347   Level: advanced
348 
349 .keywords: PetscPartitioner, register
350 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
351 
352 @*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Add the creation routine under the given name to the global list of
     partitioner constructors consulted by PetscPartitionerSetType() */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
361 
362 #undef __FUNCT__
363 #define __FUNCT__ "PetscPartitionerSetType"
364 /*@C
365   PetscPartitionerSetType - Builds a particular PetscPartitioner
366 
367   Collective on PetscPartitioner
368 
369   Input Parameters:
370 + part - The PetscPartitioner object
371 - name - The kind of partitioner
372 
373   Options Database Key:
374 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
375 
376   Level: intermediate
377 
378 .keywords: PetscPartitioner, set, type
379 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
380 @*/
381 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
382 {
383   PetscErrorCode (*r)(PetscPartitioner);
384   PetscBool      match;
385   PetscErrorCode ierr;
386 
387   PetscFunctionBegin;
388   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
389   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
390   if (match) PetscFunctionReturn(0);
391 
392   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
393   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
394   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
395 
396   if (part->ops->destroy) {
397     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
398     part->ops->destroy = NULL;
399   }
400   ierr = (*r)(part);CHKERRQ(ierr);
401   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
402   PetscFunctionReturn(0);
403 }
404 
405 #undef __FUNCT__
406 #define __FUNCT__ "PetscPartitionerGetType"
407 /*@C
408   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
409 
410   Not Collective
411 
412   Input Parameter:
413 . part - The PetscPartitioner
414 
415   Output Parameter:
416 . name - The PetscPartitioner type name
417 
418   Level: intermediate
419 
420 .keywords: PetscPartitioner, get, type, name
421 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
422 @*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  /* Make sure the type registry is populated before reporting the name */
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  /* Returns the object's stored type name; may be NULL if no type was set */
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
434 
435 #undef __FUNCT__
436 #define __FUNCT__ "PetscPartitionerView"
437 /*@C
438   PetscPartitionerView - Views a PetscPartitioner
439 
440   Collective on PetscPartitioner
441 
  Input Parameters:
443 + part - the PetscPartitioner object to view
444 - v    - the viewer
445 
446   Level: developer
447 
448 .seealso: PetscPartitionerDestroy()
449 @*/
PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Default to stdout on the partitioner's communicator when no viewer is given */
  if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
  /* Delegate to the type-specific view routine, if the implementation has one */
  if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
460 
461 #undef __FUNCT__
462 #define __FUNCT__ "PetscPartitionerSetTypeFromOptions_Internal"
463 PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
464 {
465   const char    *defaultType;
466   char           name[256];
467   PetscBool      flg;
468   PetscErrorCode ierr;
469 
470   PetscFunctionBegin;
471   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
472   if (!((PetscObject) part)->type_name)
473 #if defined(PETSC_HAVE_CHACO)
474     defaultType = PETSCPARTITIONERCHACO;
475 #elif defined(PETSC_HAVE_PARMETIS)
476     defaultType = PETSCPARTITIONERPARMETIS;
477 #else
478     defaultType = PETSCPARTITIONERSIMPLE;
479 #endif
480   else
481     defaultType = ((PetscObject) part)->type_name;
482   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
483 
484   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
485   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);CHKERRQ(ierr);
486   if (flg) {
487     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
488   } else if (!((PetscObject) part)->type_name) {
489     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
490   }
491   ierr = PetscOptionsEnd();CHKERRQ(ierr);
492   PetscFunctionReturn(0);
493 }
494 
495 #undef __FUNCT__
496 #define __FUNCT__ "PetscPartitionerSetFromOptions"
497 /*@
498   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
499 
500   Collective on PetscPartitioner
501 
502   Input Parameter:
503 . part - the PetscPartitioner object to set options for
504 
505   Level: developer
506 
507 .seealso: PetscPartitionerView()
508 @*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Resolve the type first so a type-specific setfromoptions handler exists below */
  ierr = PetscPartitionerSetTypeFromOptions_Internal(part);CHKERRQ(ierr);

  /* PetscObjectOptionsBegin brings PetscOptionsObject into scope for the calls below */
  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  if (part->ops->setfromoptions) {ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);}
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  /* Honor -petscpartitioner_view after the options are applied */
  ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
525 
526 #undef __FUNCT__
527 #define __FUNCT__ "PetscPartitionerSetUp"
528 /*@C
529   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
530 
531   Collective on PetscPartitioner
532 
533   Input Parameter:
534 . part - the PetscPartitioner object to setup
535 
536   Level: developer
537 
538 .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
539 @*/
PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Delegate to the type-specific setup, if the implementation provides one */
  if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
549 
550 #undef __FUNCT__
551 #define __FUNCT__ "PetscPartitionerDestroy"
552 /*@
553   PetscPartitionerDestroy - Destroys a PetscPartitioner object
554 
555   Collective on PetscPartitioner
556 
557   Input Parameter:
558 . part - the PetscPartitioner object to destroy
559 
560   Level: developer
561 
562 .seealso: PetscPartitionerView()
563 @*/
564 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
565 {
566   PetscErrorCode ierr;
567 
568   PetscFunctionBegin;
569   if (!*part) PetscFunctionReturn(0);
570   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
571 
572   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
573   ((PetscObject) (*part))->refct = 0;
574 
575   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
576   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
577   PetscFunctionReturn(0);
578 }
579 
580 #undef __FUNCT__
581 #define __FUNCT__ "PetscPartitionerCreate"
582 /*@
583   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
584 
585   Collective on MPI_Comm
586 
587   Input Parameter:
588 . comm - The communicator for the PetscPartitioner object
589 
590   Output Parameter:
591 . part - The PetscPartitioner object
592 
593   Level: beginner
594 
595 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
596 @*/
PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
{
  PetscPartitioner p;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidPointer(part, 2);
  *part = NULL;
  /* Ensure the DM package (which registers PETSCPARTITIONER_CLASSID) is initialized */
  ierr = DMInitializePackage();CHKERRQ(ierr);

  /* Allocate the object header with its destroy/view callbacks; the type is set later */
  ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);

  *part = p;
  PetscFunctionReturn(0);
}
612 
613 #undef __FUNCT__
614 #define __FUNCT__ "PetscPartitionerPartition"
615 /*@
616   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
617 
618   Collective on DM
619 
620   Input Parameters:
621 + part    - The PetscPartitioner
622 - dm      - The mesh DM
623 
624   Output Parameters:
625 + partSection     - The PetscSection giving the division of points by partition
626 - partition       - The list of points by partition
627 
628   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
629 
630   Level: developer
631 
.seealso: DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
633 @*/
634 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
635 {
636   PetscMPIInt    size;
637   PetscErrorCode ierr;
638 
639   PetscFunctionBegin;
640   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
641   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
642   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
643   PetscValidPointer(partition, 5);
644   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
645   if (size == 1) {
646     PetscInt *points;
647     PetscInt  cStart, cEnd, c;
648 
649     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
650     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
651     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
652     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
653     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
654     for (c = cStart; c < cEnd; ++c) points[c] = c;
655     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
656     PetscFunctionReturn(0);
657   }
658   if (part->height == 0) {
659     PetscInt numVertices;
660     PetscInt *start     = NULL;
661     PetscInt *adjacency = NULL;
662     IS       globalNumbering;
663 
664     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
665     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
666     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
667     ierr = PetscFree(start);CHKERRQ(ierr);
668     ierr = PetscFree(adjacency);CHKERRQ(ierr);
669     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
670       const PetscInt *globalNum;
671       const PetscInt *partIdx;
672       PetscInt *map, cStart, cEnd;
673       PetscInt *adjusted, i, localSize, offset;
674       IS    newPartition;
675 
676       ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
677       ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
678       ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
679       ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
680       ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
681       ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
682       for (i = cStart, offset = 0; i < cEnd; i++) {
683         if (globalNum[i - cStart] >= 0) map[offset++] = i;
684       }
685       for (i = 0; i < localSize; i++) {
686         adjusted[i] = map[partIdx[i]];
687       }
688       ierr = PetscFree(map);CHKERRQ(ierr);
689       ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
690       ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
691       ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
692       ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
693       ierr = ISDestroy(partition);CHKERRQ(ierr);
694       *partition = newPartition;
695     }
696   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
697   PetscFunctionReturn(0);
698 
699 }
700 
701 #undef __FUNCT__
702 #define __FUNCT__ "PetscPartitionerDestroy_Shell"
PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  /* Release the stored partition description, then the implementation struct itself */
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
714 
715 #undef __FUNCT__
716 #define __FUNCT__ "PetscPartitionerView_Shell_Ascii"
717 PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
718 {
719   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
720   PetscViewerFormat       format;
721   PetscErrorCode          ierr;
722 
723   PetscFunctionBegin;
724   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
725   ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
726   if (p->random) {
727     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
728     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
729     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
730   }
731   PetscFunctionReturn(0);
732 }
733 
734 #undef __FUNCT__
735 #define __FUNCT__ "PetscPartitionerView_Shell"
PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  /* Only ASCII viewers are handled; other viewer types are silently ignored */
  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
  if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
748 
749 #undef __FUNCT__
750 #define __FUNCT__ "PetscPartitionerSetFromOptions_Shell"
751 PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
752 {
753   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
754   PetscErrorCode          ierr;
755 
756   PetscFunctionBegin;
757   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitionerShell Options");CHKERRQ(ierr);
758   ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
759   ierr = PetscOptionsTail();CHKERRQ(ierr);
760   PetscFunctionReturn(0);
761 }
762 
763 #undef __FUNCT__
764 #define __FUNCT__ "PetscPartitionerPartition_Shell"
765 PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
766 {
767   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
768   PetscInt                np;
769   PetscErrorCode          ierr;
770 
771   PetscFunctionBegin;
772   if (p->random) {
773     PetscRandom r;
774     PetscInt   *sizes, *points, v;
775 
776     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
777     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (nparts+1));CHKERRQ(ierr);
778     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
779     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
780     for (v = 0; v < numVertices; ++v) {
781       PetscReal val;
782       PetscInt  part;
783 
784       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
785       part = PetscFloorReal(val);
786       points[v] = part;
787       ++sizes[part];
788     }
789     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
790     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
791     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
792   }
793   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
794   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
795   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
796   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
797   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
798   *partition = p->partition;
799   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
800   PetscFunctionReturn(0);
801 }
802 
803 #undef __FUNCT__
804 #define __FUNCT__ "PetscPartitionerInitialize_Shell"
805 PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
806 {
807   PetscFunctionBegin;
808   part->ops->view           = PetscPartitionerView_Shell;
809   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
810   part->ops->destroy        = PetscPartitionerDestroy_Shell;
811   part->ops->partition      = PetscPartitionerPartition_Shell;
812   PetscFunctionReturn(0);
813 }
814 
/*MC
  PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object that uses a partition supplied by the user with PetscPartitionerShellSetPartition(), or a random partition when -petscpartitioner_shell_random is given

  Level: intermediate

.seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType(), PetscPartitionerShellSetPartition()
M*/
822 
823 #undef __FUNCT__
824 #define __FUNCT__ "PetscPartitionerCreate_Shell"
825 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
826 {
827   PetscPartitioner_Shell *p;
828   PetscErrorCode          ierr;
829 
830   PetscFunctionBegin;
831   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
832   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
833   part->data = p;
834 
835   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
836   p->random = PETSC_FALSE;
837   PetscFunctionReturn(0);
838 }
839 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerShellSetPartition"
/*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh

  Collective on Part

  Input Parameters:
+ part     - The PetscPartitioner
. numProcs - The number of partitions
. sizes    - array of size numProcs (or NULL) providing the number of points in each partition
- points   - array of size sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)

  Level: developer

  Notes:

    It is safe to free the sizes and points arrays after use in this routine.

.seealso DMPlexDistribute(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt numProcs, const PetscInt sizes[], const PetscInt points[])
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscInt                proc, numPoints;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* points is only required (and only read) when sizes is provided */
  if (sizes) {PetscValidPointer(sizes, 3);}
  if (sizes) {PetscValidPointer(points, 4);}
  /* Drop any previously configured partition before installing the new one */
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(p->section, 0, numProcs);CHKERRQ(ierr);
  if (sizes) {
    for (proc = 0; proc < numProcs; ++proc) {
      ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
    }
  }
  ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
  /* When sizes is NULL the section has zero storage, so the IS below is empty
     and points (NULL in that case) is never dereferenced */
  ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
  /* PETSC_COPY_VALUES: the caller may free points after this call */
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
885 
886 #undef __FUNCT__
887 #define __FUNCT__ "PetscPartitionerShellSetRandom"
888 /*@
889   PetscPartitionerShellSetRandom - Set the flag to use a random partition
890 
891   Collective on Part
892 
893   Input Parameters:
894 + part   - The PetscPartitioner
895 - random - The flag to use a random partition
896 
897   Level: intermediate
898 
899 .seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
900 @*/
901 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
902 {
903   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
904 
905   PetscFunctionBegin;
906   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
907   p->random = random;
908   PetscFunctionReturn(0);
909 }
910 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerShellGetRandom"
/*@
  PetscPartitionerShellGetRandom - get the flag to use a random partition

  Not collective

  Input Parameter:
. part   - The PetscPartitioner

  Output Parameter:
. random - The flag to use a random partition

  Level: intermediate

.seealso PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(random, 2);
  *random = p->random;
  PetscFunctionReturn(0);
}
938 
939 #undef __FUNCT__
940 #define __FUNCT__ "PetscPartitionerDestroy_Simple"
941 PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
942 {
943   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
944   PetscErrorCode          ierr;
945 
946   PetscFunctionBegin;
947   ierr = PetscFree(p);CHKERRQ(ierr);
948   PetscFunctionReturn(0);
949 }
950 
951 #undef __FUNCT__
952 #define __FUNCT__ "PetscPartitionerView_Simple_Ascii"
953 PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
954 {
955   PetscViewerFormat format;
956   PetscErrorCode    ierr;
957 
958   PetscFunctionBegin;
959   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
960   ierr = PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");CHKERRQ(ierr);
961   PetscFunctionReturn(0);
962 }
963 
964 #undef __FUNCT__
965 #define __FUNCT__ "PetscPartitionerView_Simple"
966 PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
967 {
968   PetscBool      iascii;
969   PetscErrorCode ierr;
970 
971   PetscFunctionBegin;
972   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
973   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
974   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
975   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
976   PetscFunctionReturn(0);
977 }
978 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerPartition_Simple"
/* Partition by splitting the global vertex numbering into nparts contiguous,
   nearly equal blocks; the graph connectivity (start/adjacency) is ignored.
   On exit, partSection holds the number of LOCAL vertices assigned to each
   partition, and *partition is the identity ordering of the local vertices
   (local vertices never change order, only their partition label). */
PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)dm);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial: the first numVertices%nparts partitions get one extra vertex */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  }
  else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Compute each process's offset in the global vertex numbering */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    /* Prefix sum: offsets[r] = first global vertex owned by rank r */
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      PetscInt rem = nvGlobal % nparts;    /* number of "large" partitions */
      PetscInt pSmall = nvGlobal/nparts;   /* size of a small partition */
      PetscInt pBig = nvGlobal/nparts + 1; /* size of a large partition */


      /* Find the range of partitions [firstPart, lastPart] whose global blocks
         overlap this process's vertex range [myFirst, myLast].  The first rem
         partitions have size pBig and the remaining ones size pSmall. */
      if (rem) {
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        /* If the index falls past the large partitions, re-index into the
           small ones, which start at global offset rem*pBig */
        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        }
        else {
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        }
        else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      }
      else {
        /* All partitions are the same size */
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      for (np = firstPart; np <= lastPart; np++) {
        /* Global extent of partition np, clipped to this process's range */
        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1054 
1055 #undef __FUNCT__
1056 #define __FUNCT__ "PetscPartitionerInitialize_Simple"
1057 PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1058 {
1059   PetscFunctionBegin;
1060   part->ops->view      = PetscPartitionerView_Simple;
1061   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1062   part->ops->partition = PetscPartitionerPartition_Simple;
1063   PetscFunctionReturn(0);
1064 }
1065 
/*MC
  PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object that assigns contiguous, nearly equal-sized blocks of points to each partition, ignoring mesh connectivity

  Level: intermediate

.seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
M*/
1073 
1074 #undef __FUNCT__
1075 #define __FUNCT__ "PetscPartitionerCreate_Simple"
1076 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1077 {
1078   PetscPartitioner_Simple *p;
1079   PetscErrorCode           ierr;
1080 
1081   PetscFunctionBegin;
1082   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1083   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1084   part->data = p;
1085 
1086   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1087   PetscFunctionReturn(0);
1088 }
1089 
1090 #undef __FUNCT__
1091 #define __FUNCT__ "PetscPartitionerDestroy_Gather"
1092 PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1093 {
1094   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1095   PetscErrorCode          ierr;
1096 
1097   PetscFunctionBegin;
1098   ierr = PetscFree(p);CHKERRQ(ierr);
1099   PetscFunctionReturn(0);
1100 }
1101 
1102 #undef __FUNCT__
1103 #define __FUNCT__ "PetscPartitionerView_Gather_Ascii"
1104 PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
1105 {
1106   PetscViewerFormat format;
1107   PetscErrorCode    ierr;
1108 
1109   PetscFunctionBegin;
1110   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1111   ierr = PetscViewerASCIIPrintf(viewer, "Gather Graph Partitioner:\n");CHKERRQ(ierr);
1112   PetscFunctionReturn(0);
1113 }
1114 
1115 #undef __FUNCT__
1116 #define __FUNCT__ "PetscPartitionerView_Gather"
1117 PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1118 {
1119   PetscBool      iascii;
1120   PetscErrorCode ierr;
1121 
1122   PetscFunctionBegin;
1123   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1124   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1125   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1126   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1127   PetscFunctionReturn(0);
1128 }
1129 
1130 #undef __FUNCT__
1131 #define __FUNCT__ "PetscPartitionerPartition_Gather"
1132 PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1133 {
1134   PetscInt       np;
1135   PetscErrorCode ierr;
1136 
1137   PetscFunctionBegin;
1138   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1139   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1140   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1141   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1142   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1143   PetscFunctionReturn(0);
1144 }
1145 
1146 #undef __FUNCT__
1147 #define __FUNCT__ "PetscPartitionerInitialize_Gather"
1148 PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1149 {
1150   PetscFunctionBegin;
1151   part->ops->view      = PetscPartitionerView_Gather;
1152   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1153   part->ops->partition = PetscPartitionerPartition_Gather;
1154   PetscFunctionReturn(0);
1155 }
1156 
/*MC
  PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object that assigns all points to the first partition

  Level: intermediate

.seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
M*/
1164 
1165 #undef __FUNCT__
1166 #define __FUNCT__ "PetscPartitionerCreate_Gather"
1167 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1168 {
1169   PetscPartitioner_Gather *p;
1170   PetscErrorCode           ierr;
1171 
1172   PetscFunctionBegin;
1173   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1174   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1175   part->data = p;
1176 
1177   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1178   PetscFunctionReturn(0);
1179 }
1180 
1181 
1182 #undef __FUNCT__
1183 #define __FUNCT__ "PetscPartitionerDestroy_Chaco"
1184 PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1185 {
1186   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1187   PetscErrorCode          ierr;
1188 
1189   PetscFunctionBegin;
1190   ierr = PetscFree(p);CHKERRQ(ierr);
1191   PetscFunctionReturn(0);
1192 }
1193 
1194 #undef __FUNCT__
1195 #define __FUNCT__ "PetscPartitionerView_Chaco_Ascii"
1196 PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1197 {
1198   PetscViewerFormat format;
1199   PetscErrorCode    ierr;
1200 
1201   PetscFunctionBegin;
1202   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1203   ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
1204   PetscFunctionReturn(0);
1205 }
1206 
1207 #undef __FUNCT__
1208 #define __FUNCT__ "PetscPartitionerView_Chaco"
1209 PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1210 {
1211   PetscBool      iascii;
1212   PetscErrorCode ierr;
1213 
1214   PetscFunctionBegin;
1215   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1216   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1217   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1218   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1219   PetscFunctionReturn(0);
1220 }
1221 
1222 #if defined(PETSC_HAVE_CHACO)
1223 #if defined(PETSC_HAVE_UNISTD_H)
1224 #include <unistd.h>
1225 #endif
1226 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1227 #include <chaco.h>
1228 #else
1229 /* Older versions of Chaco do not have an include file */
1230 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1231                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1232                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1233                        int mesh_dims[3], double *goal, int global_method, int local_method,
1234                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1235 #endif
1236 extern int FREE_GRAPH;
1237 #endif
1238 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerPartition_Chaco"
/* Partition the local CSR graph with the (serial) Chaco library.
   The adjacency array is temporarily shifted to Chaco's 1-based numbering
   (and shifted back before returning), Chaco's stdout chatter is captured
   through a pipe so it can be reported on error, and the per-vertex
   assignment is converted into a PetscSection + IS pair. */
PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int           *assignment;              /* Output partition */
#else
  short int     *assignment;              /* Output partition */
#endif
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  if (!numVertices) {
    /* No local vertices: return an empty section and IS */
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco uses 1-based vertex numbers; shift the caller's adjacency in place
     (undone at the end of this function) */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    /* Restore stdout and keep whatever Chaco printed for the error message */
    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* points lists the local vertices grouped by partition number */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift applied above, restoring the caller's adjacency */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1348 
1349 #undef __FUNCT__
1350 #define __FUNCT__ "PetscPartitionerInitialize_Chaco"
1351 PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1352 {
1353   PetscFunctionBegin;
1354   part->ops->view      = PetscPartitionerView_Chaco;
1355   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1356   part->ops->partition = PetscPartitionerPartition_Chaco;
1357   PetscFunctionReturn(0);
1358 }
1359 
1360 /*MC
1361   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1362 
1363   Level: intermediate
1364 
1365 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1366 M*/
1367 
1368 #undef __FUNCT__
1369 #define __FUNCT__ "PetscPartitionerCreate_Chaco"
1370 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1371 {
1372   PetscPartitioner_Chaco *p;
1373   PetscErrorCode          ierr;
1374 
1375   PetscFunctionBegin;
1376   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1377   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1378   part->data = p;
1379 
1380   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1381   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1382   PetscFunctionReturn(0);
1383 }
1384 
1385 #undef __FUNCT__
1386 #define __FUNCT__ "PetscPartitionerDestroy_ParMetis"
1387 PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1388 {
1389   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1390   PetscErrorCode             ierr;
1391 
1392   PetscFunctionBegin;
1393   ierr = PetscFree(p);CHKERRQ(ierr);
1394   PetscFunctionReturn(0);
1395 }
1396 
1397 #undef __FUNCT__
1398 #define __FUNCT__ "PetscPartitionerView_ParMetis_Ascii"
1399 PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1400 {
1401   PetscViewerFormat format;
1402   PetscErrorCode    ierr;
1403 
1404   PetscFunctionBegin;
1405   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1406   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
1407   PetscFunctionReturn(0);
1408 }
1409 
1410 #undef __FUNCT__
1411 #define __FUNCT__ "PetscPartitionerView_ParMetis"
1412 PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1413 {
1414   PetscBool      iascii;
1415   PetscErrorCode ierr;
1416 
1417   PetscFunctionBegin;
1418   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1419   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1420   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1421   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1422   PetscFunctionReturn(0);
1423 }
1424 
1425 #if defined(PETSC_HAVE_PARMETIS)
1426 #include <parmetis.h>
1427 #endif
1428 
1429 #undef __FUNCT__
1430 #define __FUNCT__ "PetscPartitionerPartition_ParMetis"
1431 PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1432 {
1433 #if defined(PETSC_HAVE_PARMETIS)
1434   MPI_Comm       comm;
1435   PetscSection   section;
1436   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1437   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1438   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1439   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1440   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1441   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1442   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1443   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1444   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1445   PetscReal     *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1446   PetscReal     *ubvec;                    /* The balance intolerance for vertex weights */
1447   PetscInt       options[5];               /* Options */
1448   /* Outputs */
1449   PetscInt       edgeCut;                  /* The number of edges cut by the partition */
1450   PetscInt      *assignment, *points;
1451   PetscMPIInt    rank, p, v, i;
1452   PetscErrorCode ierr;
1453 
1454   PetscFunctionBegin;
1455   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1456   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1457   options[0] = 0; /* Use all defaults */
1458   /* Calculate vertex distribution */
1459   ierr = PetscMalloc5(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
1460   vtxdist[0] = 0;
1461   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1462   for (p = 2; p <= nparts; ++p) {
1463     vtxdist[p] += vtxdist[p-1];
1464   }
1465   /* Calculate weights */
1466   for (p = 0; p < nparts; ++p) {
1467     tpwgts[p] = 1.0/nparts;
1468   }
1469   ubvec[0] = 1.05;
1470   /* Weight cells by dofs on cell by default */
1471   ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1472   if (section) {
1473     PetscInt cStart, cEnd, dof;
1474 
1475     ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
1476     for (v = cStart; v < cEnd; ++v) {
1477       ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1478       vwgt[v-cStart] = PetscMax(dof, 1);
1479     }
1480   } else {
1481     for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1482   }
1483 
1484   if (nparts == 1) {
1485     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1486   } else {
1487     if (vtxdist[1] == vtxdist[nparts]) {
1488       if (!rank) {
1489         PetscStackPush("METIS_PartGraphKway");
1490         ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
1491         PetscStackPop;
1492         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1493       }
1494     } else {
1495       PetscStackPush("ParMETIS_V3_PartKway");
1496       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1497       PetscStackPop;
1498       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
1499     }
1500   }
1501   /* Convert to PetscSection+IS */
1502   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1503   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1504   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1505   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1506   for (p = 0, i = 0; p < nparts; ++p) {
1507     for (v = 0; v < nvtxs; ++v) {
1508       if (assignment[v] == p) points[i++] = v;
1509     }
1510   }
1511   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1512   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1513   ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
1514   PetscFunctionReturn(0);
1515 #else
1516   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1517 #endif
1518 }
1519 
1520 #undef __FUNCT__
1521 #define __FUNCT__ "PetscPartitionerInitialize_ParMetis"
1522 PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1523 {
1524   PetscFunctionBegin;
1525   part->ops->view      = PetscPartitionerView_ParMetis;
1526   part->ops->destroy   = PetscPartitionerDestroy_ParMetis;
1527   part->ops->partition = PetscPartitionerPartition_ParMetis;
1528   PetscFunctionReturn(0);
1529 }
1530 
1531 /*MC
1532   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1533 
1534   Level: intermediate
1535 
1536 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1537 M*/
1538 
1539 #undef __FUNCT__
1540 #define __FUNCT__ "PetscPartitionerCreate_ParMetis"
1541 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1542 {
1543   PetscPartitioner_ParMetis *p;
1544   PetscErrorCode          ierr;
1545 
1546   PetscFunctionBegin;
1547   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1548   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1549   part->data = p;
1550 
1551   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1552   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1553   PetscFunctionReturn(0);
1554 }
1555 
1556 #undef __FUNCT__
1557 #define __FUNCT__ "DMPlexGetPartitioner"
1558 /*@
1559   DMPlexGetPartitioner - Get the mesh partitioner
1560 
1561   Not collective
1562 
1563   Input Parameter:
1564 . dm - The DM
1565 
1566   Output Parameter:
1567 . part - The PetscPartitioner
1568 
1569   Level: developer
1570 
1571   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1572 
1573 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1574 @*/
1575 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1576 {
1577   DM_Plex *mesh = (DM_Plex *) dm->data;
1578 
1579   PetscFunctionBegin;
1580   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1581   PetscValidPointer(part, 2);
1582   *part = mesh->partitioner;
1583   PetscFunctionReturn(0);
1584 }
1585 
1586 #undef __FUNCT__
1587 #define __FUNCT__ "DMPlexSetPartitioner"
1588 /*@
1589   DMPlexSetPartitioner - Set the mesh partitioner
1590 
1591   logically collective on dm and part
1592 
1593   Input Parameters:
1594 + dm - The DM
1595 - part - The partitioner
1596 
1597   Level: developer
1598 
1599   Note: Any existing PetscPartitioner will be destroyed.
1600 
1601 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1602 @*/
1603 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1604 {
1605   DM_Plex       *mesh = (DM_Plex *) dm->data;
1606   PetscErrorCode ierr;
1607 
1608   PetscFunctionBegin;
1609   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1610   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1611   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1612   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1613   mesh->partitioner = part;
1614   PetscFunctionReturn(0);
1615 }
1616 
1617 #undef __FUNCT__
1618 #define __FUNCT__ "DMPlexPartitionLabelClosure_Tree"
1619 static PetscErrorCode DMPlexPartitionLabelClosure_Tree(DM dm, DMLabel label, PetscInt rank, PetscInt point, PetscBool up, PetscBool down)
1620 {
1621   PetscErrorCode ierr;
1622 
1623   PetscFunctionBegin;
1624   if (up) {
1625     PetscInt parent;
1626 
1627     ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
1628     if (parent != point) {
1629       PetscInt closureSize, *closure = NULL, i;
1630 
1631       ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
1632       for (i = 0; i < closureSize; i++) {
1633         PetscInt cpoint = closure[2*i];
1634 
1635         ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
1636         ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
1637       }
1638       ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
1639     }
1640   }
1641   if (down) {
1642     PetscInt numChildren;
1643     const PetscInt *children;
1644 
1645     ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
1646     if (numChildren) {
1647       PetscInt i;
1648 
1649       for (i = 0; i < numChildren; i++) {
1650         PetscInt cpoint = children[i];
1651 
1652         ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
1653         ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
1654       }
1655     }
1656   }
1657   PetscFunctionReturn(0);
1658 }
1659 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelClosure"
/*@
  DMPlexPartitionLabelClosure - Add the closure of all points to the partition label

  Input Parameters:
+ dm     - The DM
- label  - DMLabel assigning ranks to remote roots

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, c, closureSize;
  PetscInt       *closure = NULL;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* Iterate over every rank value present in the label */
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];

    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* Passing the previous "closure" back in reuses the work array across iterations */
      ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      /* The closure is stored as (point, orientation) pairs, hence the stride of 2 */
      for (c = 0; c < closureSize*2; c += 2) {
        ierr = DMLabelSetValue(label, closure[c], rank);CHKERRQ(ierr);
        /* Also mark tree ancestors and children of each closure point */
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,closure[c],PETSC_TRUE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  /* The reused work array need only be restored once; the point argument is irrelevant here */
  if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);}
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1706 
1707 #undef __FUNCT__
1708 #define __FUNCT__ "DMPlexPartitionLabelAdjacency"
1709 /*@
1710   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
1711 
1712   Input Parameters:
1713 + dm     - The DM
1714 - label  - DMLabel assinging ranks to remote roots
1715 
1716   Level: developer
1717 
1718 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1719 @*/
1720 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
1721 {
1722   IS              rankIS,   pointIS;
1723   const PetscInt *ranks,   *points;
1724   PetscInt        numRanks, numPoints, r, p, a, adjSize;
1725   PetscInt       *adj = NULL;
1726   PetscErrorCode  ierr;
1727 
1728   PetscFunctionBegin;
1729   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1730   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1731   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1732   for (r = 0; r < numRanks; ++r) {
1733     const PetscInt rank = ranks[r];
1734 
1735     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1736     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1737     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1738     for (p = 0; p < numPoints; ++p) {
1739       adjSize = PETSC_DETERMINE;
1740       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
1741       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
1742     }
1743     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1744     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1745   }
1746   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1747   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1748   ierr = PetscFree(adj);CHKERRQ(ierr);
1749   PetscFunctionReturn(0);
1750 }
1751 
1752 #undef __FUNCT__
1753 #define __FUNCT__ "DMPlexPartitionLabelPropagate"
1754 /*@
1755   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
1756 
1757   Input Parameters:
1758 + dm     - The DM
1759 - label  - DMLabel assinging ranks to remote roots
1760 
1761   Level: developer
1762 
1763   Note: This is required when generating multi-level overlaps to capture
1764   overlap points from non-neighbouring partitions.
1765 
1766 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1767 @*/
1768 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
1769 {
1770   MPI_Comm        comm;
1771   PetscMPIInt     rank;
1772   PetscSF         sfPoint;
1773   DMLabel         lblRoots, lblLeaves;
1774   IS              rankIS, pointIS;
1775   const PetscInt *ranks;
1776   PetscInt        numRanks, r;
1777   PetscErrorCode  ierr;
1778 
1779   PetscFunctionBegin;
1780   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1781   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1782   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1783   /* Pull point contributions from remote leaves into local roots */
1784   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
1785   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
1786   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1787   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1788   for (r = 0; r < numRanks; ++r) {
1789     const PetscInt remoteRank = ranks[r];
1790     if (remoteRank == rank) continue;
1791     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
1792     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1793     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1794   }
1795   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1796   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1797   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
1798   /* Push point contributions from roots into remote leaves */
1799   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
1800   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
1801   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1802   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1803   for (r = 0; r < numRanks; ++r) {
1804     const PetscInt remoteRank = ranks[r];
1805     if (remoteRank == rank) continue;
1806     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
1807     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1808     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1809   }
1810   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1811   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1812   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
1813   PetscFunctionReturn(0);
1814 }
1815 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelInvert"
/*@
  DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label

  Input Parameters:
+ dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
. processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
- leafLabel - DMLabel assigning ranks to remote roots

  Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
  resulting leafLabel is a receiver mapping of remote roots to their parent rank.

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, numProcs;
  PetscInt           p, n, numNeighbors, size, l, nleaves;
  PetscSF            sfPoint;
  PetscSFNode       *rootPoints, *leafPoints;
  PetscSection       rootSection, leafSection;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners */
  /* rootSection is charted over ranks [0, numProcs): dof of rank n = number of points destined for n */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, numProcs);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(size, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* If the point is a leaf of the point SF, record its remote (owning) copy;
         otherwise record it as owned by this rank.
         NOTE(review): when the SF stores leaves contiguously (local == NULL), every
         point is treated as locally owned here -- presumably plex point SFs always
         provide an explicit leaf list; confirm. */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap */
  /* Ship the (point, rank) pairs to their target ranks; MPIU_2INT matches PetscSFNode layout */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Filter remote contributions (ovLeafPoints) into the overlapSF */
  ierr = PetscSectionGetStorageSize(leafSection, &size);CHKERRQ(ierr);
  for (p = 0; p < size; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1905 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelCreateSF"
/*@
  DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points

  Input Parameters:
+ dm    - The DM
. label - DMLabel assigning ranks to remote roots

  Output Parameter:
- sf    - The star forest communication context encapsulating the defined mapping

  Note: The incoming label is a receiver mapping of remote points to their parent rank.

  Level: developer

.seealso: DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
{
  PetscMPIInt     rank, numProcs;
  PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
  PetscSFNode    *remotePoints;
  IS              remoteRootIS;
  const PetscInt *remoteRoots;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);

  /* Count total leaves: sum of the stratum sizes over all possible ranks (O(numProcs) scan) */
  for (numRemote = 0, n = 0; n < numProcs; ++n) {
    ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
    numRemote += numPoints;
  }
  ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
  /* Put owned points first */
  ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
  if (numPoints > 0) {
    ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
    ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    for (p = 0; p < numPoints; p++) {
      /* Points labeled with our own rank are "remote" copies of our own roots */
      remotePoints[idx].index = remoteRoots[p];
      remotePoints[idx].rank = rank;
      idx++;
    }
    ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
  }
  /* Now add remote points */
  for (n = 0; n < numProcs; ++n) {
    ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
    if (numPoints <= 0 || n == rank) continue; /* rank == n was already handled above */
    ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
    ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    for (p = 0; p < numPoints; p++) {
      remotePoints[idx].index = remoteRoots[p];
      remotePoints[idx].rank = n;
      idx++;
    }
    ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
  }
  ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  /* NULL ilocal means leaves are contiguous; the SF takes ownership of remotePoints */
  ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1974