xref: /petsc/src/dm/impls/plex/plexpartition.c (revision d244ea2e0b0868d12f8d635edc743b2190e533e6)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
/* Class id for PetscPartitioner objects; presumably assigned during package registration — not shown in this file */
PetscClassId PETSCPARTITIONER_CLASSID = 0;

/* Registry of partitioner implementations, filled by PetscPartitionerRegister() and consulted by PetscPartitionerSetType() */
PetscFunctionList PetscPartitionerList              = NULL;
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
7 
8 PetscBool ChacoPartitionercite = PETSC_FALSE;
9 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
10                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
11                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
12                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
13                                "  isbn      = {0-89791-816-9},\n"
14                                "  pages     = {28},\n"
15                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
16                                "  publisher = {ACM Press},\n"
17                                "  address   = {New York},\n"
18                                "  year      = {1995}\n}\n";
19 
/* BibTeX citation for the ParMetis partitioner; the flag records whether it has been registered yet */
PetscBool ParMetisPartitionercite = PETSC_FALSE;
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";
28 
29 #undef __FUNCT__
30 #define __FUNCT__ "DMPlexCreatePartitionerGraph"
31 /*@C
32   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
33 
34   Input Parameters:
35 + dm      - The mesh DM dm
36 - height  - Height of the strata from which to construct the graph
37 
38   Output Parameter:
39 + numVertices     - Number of vertices in the graph
40 . offsets         - Point offsets in the graph
41 . adjacency       - Point connectivity in the graph
42 - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.
43 
44   The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
45   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
46   representation on the mesh.
47 
48   Level: developer
49 
50 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
51 @*/
52 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
53 {
54   PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
55   PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
56   IS             cellNumbering;
57   const PetscInt *cellNum;
58   PetscBool      useCone, useClosure;
59   PetscSection   section;
60   PetscSegBuffer adjBuffer;
61   PetscSF        sfPoint;
62   PetscMPIInt    rank;
63   PetscErrorCode ierr;
64 
65   PetscFunctionBegin;
66   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
67   ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
68   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
69   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
70   /* Build adjacency graph via a section/segbuffer */
71   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
72   ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
73   ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
74   /* Always use FVM adjacency to create partitioner graph */
75   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
76   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
77   ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
78   ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
79   ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);CHKERRQ(ierr);
80   if (globalNumbering) {
81     ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
82     *globalNumbering = cellNumbering;
83   }
84   ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
85   for (*numVertices = 0, p = pStart; p < pEnd; p++) {
86     /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
87     if (nroots > 0) {if (cellNum[p] < 0) continue;}
88     adjSize = PETSC_DETERMINE;
89     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
90     for (a = 0; a < adjSize; ++a) {
91       const PetscInt point = adj[a];
92       if (point != p && pStart <= point && point < pEnd) {
93         PetscInt *PETSC_RESTRICT pBuf;
94         ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
95         ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
96         *pBuf = point;
97       }
98     }
99     (*numVertices)++;
100   }
101   ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
102   ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
103   /* Derive CSR graph from section/segbuffer */
104   ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
105   ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
106   ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
107   for (idx = 0, p = pStart; p < pEnd; p++) {
108     if (nroots > 0) {if (cellNum[p] < 0) continue;}
109     ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
110   }
111   vOffsets[*numVertices] = size;
112   if (offsets) *offsets = vOffsets;
113   ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
114   {
115     ISLocalToGlobalMapping ltogCells;
116     PetscInt n, size, *cells_arr;
117     /* In parallel, apply a global cell numbering to the graph */
118     ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
119     ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
120     ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
121     ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
122     /* Convert to positive global cell numbers */
123     for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
124     ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
125     ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);CHKERRQ(ierr);
126     ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
127     ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
128   }
129   if (adjacency) *adjacency = graph;
130   /* Clean up */
131   ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
132   ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
133   ierr = PetscFree(adj);CHKERRQ(ierr);
134   PetscFunctionReturn(0);
135 }
136 
137 #undef __FUNCT__
138 #define __FUNCT__ "DMPlexCreateNeighborCSR"
139 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
140 {
141   const PetscInt maxFaceCases = 30;
142   PetscInt       numFaceCases = 0;
143   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
144   PetscInt      *off, *adj;
145   PetscInt      *neighborCells = NULL;
146   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
147   PetscErrorCode ierr;
148 
149   PetscFunctionBegin;
150   /* For parallel partitioning, I think you have to communicate supports */
151   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
152   cellDim = dim - cellHeight;
153   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
154   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
155   if (cEnd - cStart == 0) {
156     if (numVertices) *numVertices = 0;
157     if (offsets)   *offsets   = NULL;
158     if (adjacency) *adjacency = NULL;
159     PetscFunctionReturn(0);
160   }
161   numCells  = cEnd - cStart;
162   faceDepth = depth - cellHeight;
163   if (dim == depth) {
164     PetscInt f, fStart, fEnd;
165 
166     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
167     /* Count neighboring cells */
168     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
169     for (f = fStart; f < fEnd; ++f) {
170       const PetscInt *support;
171       PetscInt        supportSize;
172       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
173       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
174       if (supportSize == 2) {
175         PetscInt numChildren;
176 
177         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
178         if (!numChildren) {
179           ++off[support[0]-cStart+1];
180           ++off[support[1]-cStart+1];
181         }
182       }
183     }
184     /* Prefix sum */
185     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
186     if (adjacency) {
187       PetscInt *tmp;
188 
189       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
190       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
191       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
192       /* Get neighboring cells */
193       for (f = fStart; f < fEnd; ++f) {
194         const PetscInt *support;
195         PetscInt        supportSize;
196         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
197         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
198         if (supportSize == 2) {
199           PetscInt numChildren;
200 
201           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
202           if (!numChildren) {
203             adj[tmp[support[0]-cStart]++] = support[1];
204             adj[tmp[support[1]-cStart]++] = support[0];
205           }
206         }
207       }
208 #if defined(PETSC_USE_DEBUG)
209       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
210 #endif
211       ierr = PetscFree(tmp);CHKERRQ(ierr);
212     }
213     if (numVertices) *numVertices = numCells;
214     if (offsets)   *offsets   = off;
215     if (adjacency) *adjacency = adj;
216     PetscFunctionReturn(0);
217   }
218   /* Setup face recognition */
219   if (faceDepth == 1) {
220     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
221 
222     for (c = cStart; c < cEnd; ++c) {
223       PetscInt corners;
224 
225       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
226       if (!cornersSeen[corners]) {
227         PetscInt nFV;
228 
229         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
230         cornersSeen[corners] = 1;
231 
232         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
233 
234         numFaceVertices[numFaceCases++] = nFV;
235       }
236     }
237   }
238   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
239   /* Count neighboring cells */
240   for (cell = cStart; cell < cEnd; ++cell) {
241     PetscInt numNeighbors = PETSC_DETERMINE, n;
242 
243     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
244     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
245     for (n = 0; n < numNeighbors; ++n) {
246       PetscInt        cellPair[2];
247       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
248       PetscInt        meetSize = 0;
249       const PetscInt *meet    = NULL;
250 
251       cellPair[0] = cell; cellPair[1] = neighborCells[n];
252       if (cellPair[0] == cellPair[1]) continue;
253       if (!found) {
254         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
255         if (meetSize) {
256           PetscInt f;
257 
258           for (f = 0; f < numFaceCases; ++f) {
259             if (numFaceVertices[f] == meetSize) {
260               found = PETSC_TRUE;
261               break;
262             }
263           }
264         }
265         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
266       }
267       if (found) ++off[cell-cStart+1];
268     }
269   }
270   /* Prefix sum */
271   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
272 
273   if (adjacency) {
274     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
275     /* Get neighboring cells */
276     for (cell = cStart; cell < cEnd; ++cell) {
277       PetscInt numNeighbors = PETSC_DETERMINE, n;
278       PetscInt cellOffset   = 0;
279 
280       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
281       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
282       for (n = 0; n < numNeighbors; ++n) {
283         PetscInt        cellPair[2];
284         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
285         PetscInt        meetSize = 0;
286         const PetscInt *meet    = NULL;
287 
288         cellPair[0] = cell; cellPair[1] = neighborCells[n];
289         if (cellPair[0] == cellPair[1]) continue;
290         if (!found) {
291           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
292           if (meetSize) {
293             PetscInt f;
294 
295             for (f = 0; f < numFaceCases; ++f) {
296               if (numFaceVertices[f] == meetSize) {
297                 found = PETSC_TRUE;
298                 break;
299               }
300             }
301           }
302           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
303         }
304         if (found) {
305           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
306           ++cellOffset;
307         }
308       }
309     }
310   }
311   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
312   if (numVertices) *numVertices = numCells;
313   if (offsets)   *offsets   = off;
314   if (adjacency) *adjacency = adj;
315   PetscFunctionReturn(0);
316 }
317 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerRegister"
/*@C
  PetscPartitionerRegister - Adds a new PetscPartitioner implementation

  Not Collective

  Input Parameters:
+ name        - The name of a new user-defined creation routine
- create_func - The creation routine itself

  Notes:
  PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners

  Sample usage:
.vb
    PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
.ve

  Then, your PetscPartitioner type can be chosen with the procedural interface via
.vb
    PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
    PetscPartitionerSetType(PetscPartitioner, "my_part");
.ve
   or at runtime via the option
.vb
    -petscpartitioner_type my_part
.ve

  Level: advanced

.keywords: PetscPartitioner, register
.seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()

@*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Record the creation routine in the global list consulted by PetscPartitionerSetType() */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
361 
362 #undef __FUNCT__
363 #define __FUNCT__ "PetscPartitionerSetType"
364 /*@C
365   PetscPartitionerSetType - Builds a particular PetscPartitioner
366 
367   Collective on PetscPartitioner
368 
369   Input Parameters:
370 + part - The PetscPartitioner object
371 - name - The kind of partitioner
372 
373   Options Database Key:
374 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
375 
376   Level: intermediate
377 
378 .keywords: PetscPartitioner, set, type
379 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
380 @*/
381 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
382 {
383   PetscErrorCode (*r)(PetscPartitioner);
384   PetscBool      match;
385   PetscErrorCode ierr;
386 
387   PetscFunctionBegin;
388   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
389   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
390   if (match) PetscFunctionReturn(0);
391 
392   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
393   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
394   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
395 
396   if (part->ops->destroy) {
397     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
398     part->ops->destroy = NULL;
399   }
400   ierr = (*r)(part);CHKERRQ(ierr);
401   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
402   PetscFunctionReturn(0);
403 }
404 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerGetType"
/*@C
  PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.

  Not Collective

  Input Parameter:
. part - The PetscPartitioner

  Output Parameter:
. name - The PetscPartitioner type name (borrowed reference; do not free)

  Level: intermediate

.keywords: PetscPartitioner, get, type, name
.seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  /* Make sure the built-in types are registered, mirroring PetscPartitionerSetType() */
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  /* NULL until a type has been set */
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
434 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerView"
/*@C
  PetscPartitionerView - Views a PetscPartitioner

  Collective on PetscPartitioner

  Input Parameters:
+ part - the PetscPartitioner object to view
- v    - the viewer

  Level: developer

.seealso: PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Default to the communicator's stdout ASCII viewer when none is supplied */
  if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
  /* Delegate to the implementation; types without a view hook print nothing */
  if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
460 
461 #undef __FUNCT__
462 #define __FUNCT__ "PetscPartitionerSetTypeFromOptions_Internal"
463 PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
464 {
465   const char    *defaultType;
466   char           name[256];
467   PetscBool      flg;
468   PetscErrorCode ierr;
469 
470   PetscFunctionBegin;
471   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
472   if (!((PetscObject) part)->type_name)
473 #if defined(PETSC_HAVE_CHACO)
474     defaultType = PETSCPARTITIONERCHACO;
475 #elif defined(PETSC_HAVE_PARMETIS)
476     defaultType = PETSCPARTITIONERPARMETIS;
477 #else
478     defaultType = PETSCPARTITIONERSIMPLE;
479 #endif
480   else
481     defaultType = ((PetscObject) part)->type_name;
482   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
483 
484   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
485   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);CHKERRQ(ierr);
486   if (flg) {
487     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
488   } else if (!((PetscObject) part)->type_name) {
489     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
490   }
491   ierr = PetscOptionsEnd();CHKERRQ(ierr);
492   PetscFunctionReturn(0);
493 }
494 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerSetFromOptions"
/*@
  PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to set options for

  Level: developer

.seealso: PetscPartitionerView()
@*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Resolve the type first so the implementation's setfromoptions hook is available below */
  ierr = PetscPartitionerSetTypeFromOptions_Internal(part);CHKERRQ(ierr);

  /* NOTE: PetscOptionsObject is declared by the expansion of PetscObjectOptionsBegin() */
  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  if (part->ops->setfromoptions) {ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);}
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
525 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerSetUp"
/*@C
  PetscPartitionerSetUp - Construct data structures for the PetscPartitioner

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to setup

  Level: developer

.seealso: PetscPartitionerView(), PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Delegate to the implementation; types without a setup hook need no setup */
  if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
549 
550 #undef __FUNCT__
551 #define __FUNCT__ "PetscPartitionerDestroy"
552 /*@
553   PetscPartitionerDestroy - Destroys a PetscPartitioner object
554 
555   Collective on PetscPartitioner
556 
557   Input Parameter:
558 . part - the PetscPartitioner object to destroy
559 
560   Level: developer
561 
562 .seealso: PetscPartitionerView()
563 @*/
564 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
565 {
566   PetscErrorCode ierr;
567 
568   PetscFunctionBegin;
569   if (!*part) PetscFunctionReturn(0);
570   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
571 
572   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
573   ((PetscObject) (*part))->refct = 0;
574 
575   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
576   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
577   PetscFunctionReturn(0);
578 }
579 
580 #undef __FUNCT__
581 #define __FUNCT__ "PetscPartitionerCreate"
582 /*@
583   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
584 
585   Collective on MPI_Comm
586 
587   Input Parameter:
588 . comm - The communicator for the PetscPartitioner object
589 
590   Output Parameter:
591 . part - The PetscPartitioner object
592 
593   Level: beginner
594 
595 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
596 @*/
597 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
598 {
599   PetscPartitioner p;
600   PetscErrorCode   ierr;
601 
602   PetscFunctionBegin;
603   PetscValidPointer(part, 2);
604   *part = NULL;
605   ierr = DMInitializePackage();CHKERRQ(ierr);
606 
607   ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
608 
609   *part = p;
610   PetscFunctionReturn(0);
611 }
612 
613 #undef __FUNCT__
614 #define __FUNCT__ "PetscPartitionerPartition"
615 /*@
616   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
617 
618   Collective on DM
619 
620   Input Parameters:
621 + part    - The PetscPartitioner
622 - dm      - The mesh DM
623 
624   Output Parameters:
625 + partSection     - The PetscSection giving the division of points by partition
626 - partition       - The list of points by partition
627 
628   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
629 
630   Level: developer
631 
632 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
633 @*/
634 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
635 {
636   PetscMPIInt    size;
637   PetscErrorCode ierr;
638 
639   PetscFunctionBegin;
640   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
641   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
642   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
643   PetscValidPointer(partition, 5);
644   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
645   if (size == 1) {
646     PetscInt *points;
647     PetscInt  cStart, cEnd, c;
648 
649     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
650     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
651     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
652     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
653     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
654     for (c = cStart; c < cEnd; ++c) points[c] = c;
655     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
656     PetscFunctionReturn(0);
657   }
658   if (part->height == 0) {
659     PetscInt numVertices;
660     PetscInt *start     = NULL;
661     PetscInt *adjacency = NULL;
662     IS       globalNumbering;
663 
664     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
665     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
666     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
667     ierr = PetscFree(start);CHKERRQ(ierr);
668     ierr = PetscFree(adjacency);CHKERRQ(ierr);
669     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
670       const PetscInt *globalNum;
671       const PetscInt *partIdx;
672       PetscInt *map, cStart, cEnd;
673       PetscInt *adjusted, i, localSize, offset;
674       IS    newPartition;
675 
676       ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
677       ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
678       ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
679       ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
680       ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
681       ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
682       for (i = cStart, offset = 0; i < cEnd; i++) {
683         if (globalNum[i - cStart] >= 0) map[offset++] = i;
684       }
685       for (i = 0; i < localSize; i++) {
686         adjusted[i] = map[partIdx[i]];
687       }
688       ierr = PetscFree(map);CHKERRQ(ierr);
689       ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
690       ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
691       ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
692       ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
693       ierr = ISDestroy(partition);CHKERRQ(ierr);
694       *partition = newPartition;
695     }
696   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
697   PetscFunctionReturn(0);
698 
699 }
700 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerDestroy_Shell"
/* Free the Shell implementation's data (stored section and partition IS), then the data struct itself.
   Installed as ops->destroy, so it is invoked from PetscPartitionerDestroy()/PetscPartitionerSetType(). */
PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
714 
715 #undef __FUNCT__
716 #define __FUNCT__ "PetscPartitionerView_Shell_Ascii"
717 PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
718 {
719   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
720   PetscViewerFormat       format;
721   PetscErrorCode          ierr;
722 
723   PetscFunctionBegin;
724   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
725   ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
726   if (p->random) {
727     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
728     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
729     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
730   }
731   PetscFunctionReturn(0);
732 }
733 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerView_Shell"
/* View dispatcher for the Shell partitioner: only ASCII viewers are handled; other viewer types are silently ignored */
PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
  if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
748 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerSetFromOptions_Shell"
/* Options hook for the Shell partitioner: reads -petscpartitioner_shell_random into p->random */
PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitionerShell Options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
762 
763 #undef __FUNCT__
764 #define __FUNCT__ "PetscPartitionerPartition_Shell"
765 PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
766 {
767   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
768   PetscInt                np;
769   PetscErrorCode          ierr;
770 
771   PetscFunctionBegin;
772   if (p->random) {
773     PetscRandom r;
774     PetscInt   *sizes, *points, v;
775 
776     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
777     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (nparts+1));CHKERRQ(ierr);
778     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
779     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
780     for (v = 0; v < numVertices; ++v) {
781       PetscReal val;
782       PetscInt  part;
783 
784       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
785       part = PetscFloorReal(val);
786       points[v] = part;
787       ++sizes[part];
788     }
789     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
790     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
791     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
792   }
793   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
794   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
795   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
796   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
797   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
798   *partition = p->partition;
799   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
800   PetscFunctionReturn(0);
801 }
802 
803 #undef __FUNCT__
804 #define __FUNCT__ "PetscPartitionerInitialize_Shell"
805 PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
806 {
807   PetscFunctionBegin;
808   part->ops->view           = PetscPartitionerView_Shell;
809   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
810   part->ops->destroy        = PetscPartitionerDestroy_Shell;
811   part->ops->partition      = PetscPartitionerPartition_Shell;
812   PetscFunctionReturn(0);
813 }
814 
815 /*MC
816   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
817 
818   Level: intermediate
819 
820 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
821 M*/
822 
823 #undef __FUNCT__
824 #define __FUNCT__ "PetscPartitionerCreate_Shell"
825 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
826 {
827   PetscPartitioner_Shell *p;
828   PetscErrorCode          ierr;
829 
830   PetscFunctionBegin;
831   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
832   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
833   part->data = p;
834 
835   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
836   p->random = PETSC_FALSE;
837   PetscFunctionReturn(0);
838 }
839 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerShellSetPartition"
/*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh

  Collective on Part

  Input Parameters:
+ part     - The PetscPartitioner
. numProcs - The number of partitions
. sizes    - array of size numProcs (or NULL) providing the number of points in each partition
- points   - array of size sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)

  Level: developer

  Notes:

    It is safe to free the sizes and points arrays after use in this routine.
    Both arrays are copied here: the section stores the per-partition counts
    and the IS stores a copy of points (PETSC_COPY_VALUES below).

.seealso DMPlexDistribute(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt numProcs, const PetscInt sizes[], const PetscInt points[])
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscInt                proc, numPoints;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  if (sizes) {PetscValidPointer(sizes, 3);}
  if (sizes) {PetscValidPointer(points, 4);} /* points is required exactly when sizes is given */
  /* Discard any previously configured partition before building the new one */
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(p->section, 0, numProcs);CHKERRQ(ierr);
  if (sizes) {
    for (proc = 0; proc < numProcs; ++proc) {
      ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
    }
  }
  ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
  /* With sizes == NULL the section has zero storage and the partition IS is empty */
  ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
885 
886 #undef __FUNCT__
887 #define __FUNCT__ "PetscPartitionerShellSetRandom"
888 /*@
889   PetscPartitionerShellSetRandom - Set the flag to use a random partition
890 
891   Collective on Part
892 
893   Input Parameters:
894 + part   - The PetscPartitioner
895 - random - The flag to use a random partition
896 
897   Level: intermediate
898 
899 .seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
900 @*/
901 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
902 {
903   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
904 
905   PetscFunctionBegin;
906   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
907   p->random = random;
908   PetscFunctionReturn(0);
909 }
910 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerShellGetRandom"
/*@
  PetscPartitionerShellGetRandom - Get the flag to use a random partition

  Collective on Part

  Input Parameter:
. part   - The PetscPartitioner

  Output Parameter:
. random - The flag to use a random partition

  Level: intermediate

.seealso PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(random, 2);
  *random = p->random;
  PetscFunctionReturn(0);
}
938 
939 #undef __FUNCT__
940 #define __FUNCT__ "PetscPartitionerDestroy_Simple"
941 PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
942 {
943   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
944   PetscErrorCode          ierr;
945 
946   PetscFunctionBegin;
947   ierr = PetscFree(p);CHKERRQ(ierr);
948   PetscFunctionReturn(0);
949 }
950 
951 #undef __FUNCT__
952 #define __FUNCT__ "PetscPartitionerView_Simple_Ascii"
953 PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
954 {
955   PetscViewerFormat format;
956   PetscErrorCode    ierr;
957 
958   PetscFunctionBegin;
959   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
960   ierr = PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");CHKERRQ(ierr);
961   PetscFunctionReturn(0);
962 }
963 
964 #undef __FUNCT__
965 #define __FUNCT__ "PetscPartitionerView_Simple"
966 PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
967 {
968   PetscBool      iascii;
969   PetscErrorCode ierr;
970 
971   PetscFunctionBegin;
972   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
973   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
974   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
975   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
976   PetscFunctionReturn(0);
977 }
978 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerPartition_Simple"
/*
  Assign the globally numbered vertices to parts in contiguous blocks of
  (nearly) equal size: each part gets nvGlobal/nparts vertices, and the first
  nvGlobal%nparts parts get one extra.  Each process records in partSection
  only how many of its own local vertices fall in each part; the returned IS
  is always the identity permutation of the local vertices.
*/
PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)dm);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  /* Identity permutation: local points keep their original order */
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial: spread the numVertices % nparts remainder over the first parts */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  }
  else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Compute the global index range [myFirst, myLast] of this process's
       vertices via an exclusive prefix sum of all local vertex counts */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      PetscInt rem = nvGlobal % nparts;    /* number of "large" parts, placed first */
      PetscInt pSmall = nvGlobal/nparts;   /* size of a small part */
      PetscInt pBig = nvGlobal/nparts + 1; /* size of a large part */


      /* Find the first and last part overlapping [myFirst, myLast] */
      if (rem) {
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        }
        else {
          /* Past the rem large parts: index into the small parts.
             NOTE(review): pSmall == 0 when nparts > nvGlobal, which would
             divide by zero here -- confirm callers ensure nparts <= nvGlobal */
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        }
        else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      }
      else {
        /* All parts have identical size nvGlobal/nparts */
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      /* Record how many of my vertices land in each overlapped part:
         the overlap of [PartStart, PartEnd) with [myFirst, myLast+1) */
      for (np = firstPart; np <= lastPart; np++) {
        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1054 
1055 #undef __FUNCT__
1056 #define __FUNCT__ "PetscPartitionerInitialize_Simple"
1057 PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1058 {
1059   PetscFunctionBegin;
1060   part->ops->view      = PetscPartitionerView_Simple;
1061   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1062   part->ops->partition = PetscPartitionerPartition_Simple;
1063   PetscFunctionReturn(0);
1064 }
1065 
1066 /*MC
1067   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1068 
1069   Level: intermediate
1070 
1071 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1072 M*/
1073 
1074 #undef __FUNCT__
1075 #define __FUNCT__ "PetscPartitionerCreate_Simple"
1076 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1077 {
1078   PetscPartitioner_Simple *p;
1079   PetscErrorCode           ierr;
1080 
1081   PetscFunctionBegin;
1082   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1083   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1084   part->data = p;
1085 
1086   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1087   PetscFunctionReturn(0);
1088 }
1089 
1090 #undef __FUNCT__
1091 #define __FUNCT__ "PetscPartitionerDestroy_Gather"
1092 PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1093 {
1094   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1095   PetscErrorCode          ierr;
1096 
1097   PetscFunctionBegin;
1098   ierr = PetscFree(p);CHKERRQ(ierr);
1099   PetscFunctionReturn(0);
1100 }
1101 
1102 #undef __FUNCT__
1103 #define __FUNCT__ "PetscPartitionerView_Gather_Ascii"
1104 PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
1105 {
1106   PetscViewerFormat format;
1107   PetscErrorCode    ierr;
1108 
1109   PetscFunctionBegin;
1110   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1111   ierr = PetscViewerASCIIPrintf(viewer, "Gather Graph Partitioner:\n");CHKERRQ(ierr);
1112   PetscFunctionReturn(0);
1113 }
1114 
1115 #undef __FUNCT__
1116 #define __FUNCT__ "PetscPartitionerView_Gather"
1117 PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1118 {
1119   PetscBool      iascii;
1120   PetscErrorCode ierr;
1121 
1122   PetscFunctionBegin;
1123   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1124   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1125   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1126   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1127   PetscFunctionReturn(0);
1128 }
1129 
1130 #undef __FUNCT__
1131 #define __FUNCT__ "PetscPartitionerPartition_Gather"
1132 PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1133 {
1134   PetscInt       np;
1135   PetscErrorCode ierr;
1136 
1137   PetscFunctionBegin;
1138   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1139   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1140   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1141   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1142   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1143   PetscFunctionReturn(0);
1144 }
1145 
1146 #undef __FUNCT__
1147 #define __FUNCT__ "PetscPartitionerInitialize_Gather"
1148 PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1149 {
1150   PetscFunctionBegin;
1151   part->ops->view      = PetscPartitionerView_Gather;
1152   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1153   part->ops->partition = PetscPartitionerPartition_Gather;
1154   PetscFunctionReturn(0);
1155 }
1156 
1157 /*MC
1158   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1159 
1160   Level: intermediate
1161 
1162 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1163 M*/
1164 
1165 #undef __FUNCT__
1166 #define __FUNCT__ "PetscPartitionerCreate_Gather"
1167 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1168 {
1169   PetscPartitioner_Gather *p;
1170   PetscErrorCode           ierr;
1171 
1172   PetscFunctionBegin;
1173   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1174   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1175   part->data = p;
1176 
1177   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1178   PetscFunctionReturn(0);
1179 }
1180 
1181 
1182 #undef __FUNCT__
1183 #define __FUNCT__ "PetscPartitionerDestroy_Chaco"
1184 PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1185 {
1186   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1187   PetscErrorCode          ierr;
1188 
1189   PetscFunctionBegin;
1190   ierr = PetscFree(p);CHKERRQ(ierr);
1191   PetscFunctionReturn(0);
1192 }
1193 
1194 #undef __FUNCT__
1195 #define __FUNCT__ "PetscPartitionerView_Chaco_Ascii"
1196 PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1197 {
1198   PetscViewerFormat format;
1199   PetscErrorCode    ierr;
1200 
1201   PetscFunctionBegin;
1202   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1203   ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
1204   PetscFunctionReturn(0);
1205 }
1206 
1207 #undef __FUNCT__
1208 #define __FUNCT__ "PetscPartitionerView_Chaco"
1209 PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1210 {
1211   PetscBool      iascii;
1212   PetscErrorCode ierr;
1213 
1214   PetscFunctionBegin;
1215   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1216   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1217   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1218   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1219   PetscFunctionReturn(0);
1220 }
1221 
1222 #if defined(PETSC_HAVE_CHACO)
1223 #if defined(PETSC_HAVE_UNISTD_H)
1224 #include <unistd.h>
1225 #endif
1226 /* Chaco does not have an include file */
1227 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1228                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1229                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1230                        int mesh_dims[3], double *goal, int global_method, int local_method,
1231                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1232 
1233 extern int FREE_GRAPH;
1234 #endif
1235 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerPartition_Chaco"
/*
  Partition the local graph using the Chaco library, converting the resulting
  per-vertex assignment into a PetscSection (counts per part) plus an IS
  (points grouped by part).  Chaco prints diagnostics to stdout, so stdout is
  temporarily redirected to a pipe and drained into a buffer, which is shown
  in the error message if the library reports failure.
*/
PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
  short int     *assignment;              /* Output partition */
  /* NOTE(review): fd_stdout and fd_pipe are only used when PETSC_HAVE_UNISTD_H
     is defined; without it they are unused locals (compiler warning) */
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  /* Nothing to partition locally: return an empty section and IS */
  if (!numVertices) {
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Shift adjacency in place; presumably Chaco expects 1-based vertex numbers
     (the library ships no header to confirm); the shift is undone at the end */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  /* NOTE(review): the return value of interface() is only inspected inside the
     PETSC_HAVE_UNISTD_H block below; without unistd.h a Chaco failure is
     silently ignored */
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    /* Restore stdout and capture whatever Chaco printed */
    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* Group vertex numbers by their assigned part (part 0 first, then 1, ...) */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the in-place shift applied to the caller's adjacency array above */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1341 
1342 #undef __FUNCT__
1343 #define __FUNCT__ "PetscPartitionerInitialize_Chaco"
1344 PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1345 {
1346   PetscFunctionBegin;
1347   part->ops->view      = PetscPartitionerView_Chaco;
1348   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1349   part->ops->partition = PetscPartitionerPartition_Chaco;
1350   PetscFunctionReturn(0);
1351 }
1352 
1353 /*MC
1354   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1355 
1356   Level: intermediate
1357 
1358 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1359 M*/
1360 
1361 #undef __FUNCT__
1362 #define __FUNCT__ "PetscPartitionerCreate_Chaco"
1363 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1364 {
1365   PetscPartitioner_Chaco *p;
1366   PetscErrorCode          ierr;
1367 
1368   PetscFunctionBegin;
1369   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1370   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1371   part->data = p;
1372 
1373   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1374   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1375   PetscFunctionReturn(0);
1376 }
1377 
1378 #undef __FUNCT__
1379 #define __FUNCT__ "PetscPartitionerDestroy_ParMetis"
1380 PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1381 {
1382   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1383   PetscErrorCode             ierr;
1384 
1385   PetscFunctionBegin;
1386   ierr = PetscFree(p);CHKERRQ(ierr);
1387   PetscFunctionReturn(0);
1388 }
1389 
1390 #undef __FUNCT__
1391 #define __FUNCT__ "PetscPartitionerView_ParMetis_Ascii"
1392 PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1393 {
1394   PetscViewerFormat format;
1395   PetscErrorCode    ierr;
1396 
1397   PetscFunctionBegin;
1398   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1399   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
1400   PetscFunctionReturn(0);
1401 }
1402 
1403 #undef __FUNCT__
1404 #define __FUNCT__ "PetscPartitionerView_ParMetis"
1405 PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1406 {
1407   PetscBool      iascii;
1408   PetscErrorCode ierr;
1409 
1410   PetscFunctionBegin;
1411   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1412   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1413   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1414   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1415   PetscFunctionReturn(0);
1416 }
1417 
1418 #if defined(PETSC_HAVE_PARMETIS)
1419 #include <parmetis.h>
1420 #endif
1421 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerPartition_ParMetis"
/*
  Partition the distributed graph with ParMETIS (or serial METIS on rank 0
  when all vertices happen to live there), weighting each vertex by the
  number of dofs the default section places on the corresponding cell.
  The per-vertex assignment is converted into a PetscSection of part sizes
  plus an IS grouping the local vertices by part.

  NOTE(review): vtxdist/tpwgts are allocated with nparts entries, yet
  MPI_Allgather below writes one entry per rank of comm and the rank-0-only
  METIS branch tests vtxdist[nparts]; this silently assumes the communicator
  size equals nparts -- confirm that callers guarantee this.
*/
PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_PARMETIS)
  MPI_Comm       comm;
  PetscSection   section;
  PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
  PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
  PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
  PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
  PetscInt      *vwgt       = NULL;        /* Vertex weights */
  PetscInt      *adjwgt     = NULL;        /* Edge weights */
  PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
  PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
  PetscInt       ncon       = 1;           /* The number of weights per vertex */
  PetscReal     *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
  PetscReal     *ubvec;                    /* The balance intolerance for vertex weights */
  PetscInt       options[5];               /* Options */
  /* Outputs */
  PetscInt       edgeCut;                  /* The number of edges cut by the partition */
  PetscInt      *assignment, *points;
  /* NOTE(review): p, v, i are PetscMPIInt but iterate up to nparts/nvtxs,
     which are PetscInt -- possible truncation with 64-bit indices; confirm */
  PetscMPIInt    rank, p, v, i;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  options[0] = 0; /* Use all defaults */
  /* Calculate vertex distribution */
  ierr = PetscMalloc5(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
  vtxdist[0] = 0;
  ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
  /* Prefix-sum the gathered counts so vtxdist[p] is the first global vertex of rank p */
  for (p = 2; p <= nparts; ++p) {
    vtxdist[p] += vtxdist[p-1];
  }
  /* Calculate weights: ask for equally sized parts */
  for (p = 0; p < nparts; ++p) {
    tpwgts[p] = 1.0/nparts;
  }
  ubvec[0] = 1.05;
  /* Weight cells by dofs on cell by default */
  ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
  if (section) {
    PetscInt cStart, cEnd, dof;

    ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
    for (v = cStart; v < cEnd; ++v) {
      ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
      /* Ensure a strictly positive weight even for dof-less cells */
      vwgt[v-cStart] = PetscMax(dof, 1);
    }
  } else {
    for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
  }

  if (nparts == 1) {
    /* Trivial case: everything in part 0 */
    ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
  } else {
    /* vtxdist[1] == vtxdist[nparts] means ranks past 0 own no vertices,
       so serial METIS on rank 0 suffices */
    if (vtxdist[1] == vtxdist[nparts]) {
      if (!rank) {
        PetscStackPush("METIS_PartGraphKway");
        ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
        PetscStackPop;
        if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
      }
    } else {
      PetscStackPush("ParMETIS_V3_PartKway");
      ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
      PetscStackPop;
      if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
    }
  }
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* Group local vertex numbers by their assigned part (part 0 first, then 1, ...) */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
#endif
}
1512 
1513 #undef __FUNCT__
1514 #define __FUNCT__ "PetscPartitionerInitialize_ParMetis"
1515 PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1516 {
1517   PetscFunctionBegin;
1518   part->ops->view      = PetscPartitionerView_ParMetis;
1519   part->ops->destroy   = PetscPartitionerDestroy_ParMetis;
1520   part->ops->partition = PetscPartitionerPartition_ParMetis;
1521   PetscFunctionReturn(0);
1522 }
1523 
1524 /*MC
1525   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1526 
1527   Level: intermediate
1528 
1529 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1530 M*/
1531 
1532 #undef __FUNCT__
1533 #define __FUNCT__ "PetscPartitionerCreate_ParMetis"
1534 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1535 {
1536   PetscPartitioner_ParMetis *p;
1537   PetscErrorCode          ierr;
1538 
1539   PetscFunctionBegin;
1540   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1541   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1542   part->data = p;
1543 
1544   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1545   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1546   PetscFunctionReturn(0);
1547 }
1548 
1549 #undef __FUNCT__
1550 #define __FUNCT__ "DMPlexGetPartitioner"
1551 /*@
1552   DMPlexGetPartitioner - Get the mesh partitioner
1553 
1554   Not collective
1555 
1556   Input Parameter:
1557 . dm - The DM
1558 
1559   Output Parameter:
1560 . part - The PetscPartitioner
1561 
1562   Level: developer
1563 
1564   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1565 
.seealso: DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1567 @*/
1568 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1569 {
1570   DM_Plex *mesh = (DM_Plex *) dm->data;
1571 
1572   PetscFunctionBegin;
1573   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1574   PetscValidPointer(part, 2);
1575   *part = mesh->partitioner;
1576   PetscFunctionReturn(0);
1577 }
1578 
1579 #undef __FUNCT__
1580 #define __FUNCT__ "DMPlexSetPartitioner"
1581 /*@
1582   DMPlexSetPartitioner - Set the mesh partitioner
1583 
1584   logically collective on dm and part
1585 
1586   Input Parameters:
1587 + dm - The DM
1588 - part - The partitioner
1589 
1590   Level: developer
1591 
1592   Note: Any existing PetscPartitioner will be destroyed.
1593 
.seealso: DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1595 @*/
1596 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1597 {
1598   DM_Plex       *mesh = (DM_Plex *) dm->data;
1599   PetscErrorCode ierr;
1600 
1601   PetscFunctionBegin;
1602   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1603   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1604   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1605   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1606   mesh->partitioner = part;
1607   PetscFunctionReturn(0);
1608 }
1609 
1610 #undef __FUNCT__
1611 #define __FUNCT__ "DMPlexPartitionLabelClosure_Tree"
1612 static PetscErrorCode DMPlexPartitionLabelClosure_Tree(DM dm, DMLabel label, PetscInt rank, PetscInt point, PetscBool up, PetscBool down)
1613 {
1614   PetscErrorCode ierr;
1615 
1616   PetscFunctionBegin;
1617   if (up) {
1618     PetscInt parent;
1619 
1620     ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
1621     if (parent != point) {
1622       PetscInt closureSize, *closure = NULL, i;
1623 
1624       ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
1625       for (i = 0; i < closureSize; i++) {
1626         PetscInt cpoint = closure[2*i];
1627 
1628         ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
1629         ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
1630       }
1631       ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
1632     }
1633   }
1634   if (down) {
1635     PetscInt numChildren;
1636     const PetscInt *children;
1637 
1638     ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
1639     if (numChildren) {
1640       PetscInt i;
1641 
1642       for (i = 0; i < numChildren; i++) {
1643         PetscInt cpoint = children[i];
1644 
1645         ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
1646         ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
1647       }
1648     }
1649   }
1650   PetscFunctionReturn(0);
1651 }
1652 
1653 #undef __FUNCT__
1654 #define __FUNCT__ "DMPlexPartitionLabelClosure"
1655 /*@
1656   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
1657 
1658   Input Parameters:
1659 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1661 
1662   Level: developer
1663 
1664 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1665 @*/
1666 PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
1667 {
1668   IS              rankIS,   pointIS;
1669   const PetscInt *ranks,   *points;
1670   PetscInt        numRanks, numPoints, r, p, c, closureSize;
1671   PetscInt       *closure = NULL;
1672   PetscErrorCode  ierr;
1673 
1674   PetscFunctionBegin;
1675   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1676   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1677   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1678   for (r = 0; r < numRanks; ++r) {
1679     const PetscInt rank = ranks[r];
1680 
1681     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1682     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1683     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1684     for (p = 0; p < numPoints; ++p) {
1685       ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
1686       for (c = 0; c < closureSize*2; c += 2) {
1687         ierr = DMLabelSetValue(label, closure[c], rank);CHKERRQ(ierr);
1688         ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,closure[c],PETSC_TRUE,PETSC_TRUE);CHKERRQ(ierr);
1689       }
1690     }
1691     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1692     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1693   }
1694   if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);}
1695   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1696   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1697   PetscFunctionReturn(0);
1698 }
1699 
1700 #undef __FUNCT__
1701 #define __FUNCT__ "DMPlexPartitionLabelAdjacency"
1702 /*@
1703   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
1704 
1705   Input Parameters:
1706 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1708 
1709   Level: developer
1710 
1711 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1712 @*/
1713 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
1714 {
1715   IS              rankIS,   pointIS;
1716   const PetscInt *ranks,   *points;
1717   PetscInt        numRanks, numPoints, r, p, a, adjSize;
1718   PetscInt       *adj = NULL;
1719   PetscErrorCode  ierr;
1720 
1721   PetscFunctionBegin;
1722   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1723   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1724   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1725   for (r = 0; r < numRanks; ++r) {
1726     const PetscInt rank = ranks[r];
1727 
1728     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1729     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1730     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1731     for (p = 0; p < numPoints; ++p) {
1732       adjSize = PETSC_DETERMINE;
1733       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
1734       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
1735     }
1736     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1737     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1738   }
1739   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1740   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1741   ierr = PetscFree(adj);CHKERRQ(ierr);
1742   PetscFunctionReturn(0);
1743 }
1744 
1745 #undef __FUNCT__
1746 #define __FUNCT__ "DMPlexPartitionLabelPropagate"
1747 /*@
1748   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
1749 
1750   Input Parameters:
1751 + dm     - The DM
1752 - label  - DMLabel assinging ranks to remote roots
1753 
1754   Level: developer
1755 
1756   Note: This is required when generating multi-level overlaps to capture
1757   overlap points from non-neighbouring partitions.
1758 
1759 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1760 @*/
1761 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
1762 {
1763   MPI_Comm        comm;
1764   PetscMPIInt     rank;
1765   PetscSF         sfPoint;
1766   DMLabel         lblRoots, lblLeaves;
1767   IS              rankIS, pointIS;
1768   const PetscInt *ranks;
1769   PetscInt        numRanks, r;
1770   PetscErrorCode  ierr;
1771 
1772   PetscFunctionBegin;
1773   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1774   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1775   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1776   /* Pull point contributions from remote leaves into local roots */
1777   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
1778   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
1779   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1780   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1781   for (r = 0; r < numRanks; ++r) {
1782     const PetscInt remoteRank = ranks[r];
1783     if (remoteRank == rank) continue;
1784     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
1785     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1786     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1787   }
1788   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1789   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1790   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
1791   /* Push point contributions from roots into remote leaves */
1792   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
1793   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
1794   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1795   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1796   for (r = 0; r < numRanks; ++r) {
1797     const PetscInt remoteRank = ranks[r];
1798     if (remoteRank == rank) continue;
1799     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
1800     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1801     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1802   }
1803   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1804   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1805   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
1806   PetscFunctionReturn(0);
1807 }
1808 
1809 #undef __FUNCT__
1810 #define __FUNCT__ "DMPlexPartitionLabelInvert"
1811 /*@
1812   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
1813 
1814   Input Parameters:
1815 + dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots
1821 
1822   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
1823   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
1824 
1825   Level: developer
1826 
1827 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1828 @*/
1829 PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
1830 {
1831   MPI_Comm           comm;
1832   PetscMPIInt        rank, numProcs;
1833   PetscInt           p, n, numNeighbors, size, l, nleaves;
1834   PetscSF            sfPoint;
1835   PetscSFNode       *rootPoints, *leafPoints;
1836   PetscSection       rootSection, leafSection;
1837   const PetscSFNode *remote;
1838   const PetscInt    *local, *neighbors;
1839   IS                 valueIS;
1840   PetscErrorCode     ierr;
1841 
1842   PetscFunctionBegin;
1843   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1844   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1845   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1846   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1847 
1848   /* Convert to (point, rank) and use actual owners */
1849   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
1850   ierr = PetscSectionSetChart(rootSection, 0, numProcs);CHKERRQ(ierr);
1851   ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
1852   ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
1853   ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
1854   for (n = 0; n < numNeighbors; ++n) {
1855     PetscInt numPoints;
1856 
1857     ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
1858     ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
1859   }
1860   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
1861   ierr = PetscSectionGetStorageSize(rootSection, &size);CHKERRQ(ierr);
1862   ierr = PetscMalloc1(size, &rootPoints);CHKERRQ(ierr);
1863   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
1864   for (n = 0; n < numNeighbors; ++n) {
1865     IS              pointIS;
1866     const PetscInt *points;
1867     PetscInt        off, numPoints, p;
1868 
1869     ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
1870     ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
1871     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1872     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1873     for (p = 0; p < numPoints; ++p) {
1874       if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
1875       else       {l = -1;}
1876       if (l >= 0) {rootPoints[off+p] = remote[l];}
1877       else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
1878     }
1879     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1880     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1881   }
1882   ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
1883   ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
1884   /* Communicate overlap */
1885   ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
1886   ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
1887   /* Filter remote contributions (ovLeafPoints) into the overlapSF */
1888   ierr = PetscSectionGetStorageSize(leafSection, &size);CHKERRQ(ierr);
1889   for (p = 0; p < size; p++) {
1890     ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
1891   }
1892   ierr = PetscFree(rootPoints);CHKERRQ(ierr);
1893   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
1894   ierr = PetscFree(leafPoints);CHKERRQ(ierr);
1895   ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
1896   PetscFunctionReturn(0);
1897 }
1898 
1899 #undef __FUNCT__
1900 #define __FUNCT__ "DMPlexPartitionLabelCreateSF"
1901 /*@
1902   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
1903 
1904   Input Parameters:
1905 + dm    - The DM
- label - DMLabel assigning ranks to remote roots

  Output Parameter:
. sf    - The star forest communication context encapsulating the defined mapping
1910 
1911   Note: The incoming label is a receiver mapping of remote points to their parent rank.
1912 
1913   Level: developer
1914 
1915 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
1916 @*/
1917 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
1918 {
1919   PetscMPIInt     rank, numProcs;
1920   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
1921   PetscSFNode    *remotePoints;
1922   IS              remoteRootIS;
1923   const PetscInt *remoteRoots;
1924   PetscErrorCode ierr;
1925 
1926   PetscFunctionBegin;
1927   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1928   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
1929 
1930   for (numRemote = 0, n = 0; n < numProcs; ++n) {
1931     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1932     numRemote += numPoints;
1933   }
1934   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
1935   /* Put owned points first */
1936   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
1937   if (numPoints > 0) {
1938     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
1939     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1940     for (p = 0; p < numPoints; p++) {
1941       remotePoints[idx].index = remoteRoots[p];
1942       remotePoints[idx].rank = rank;
1943       idx++;
1944     }
1945     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1946     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1947   }
1948   /* Now add remote points */
1949   for (n = 0; n < numProcs; ++n) {
1950     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1951     if (numPoints <= 0 || n == rank) continue;
1952     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
1953     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1954     for (p = 0; p < numPoints; p++) {
1955       remotePoints[idx].index = remoteRoots[p];
1956       remotePoints[idx].rank = n;
1957       idx++;
1958     }
1959     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1960     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1961   }
1962   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
1963   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1964   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1965   PetscFunctionReturn(0);
1966 }
1967