xref: /petsc/src/dm/impls/plex/plexpartition.c (revision ed7fd8fd1b9cf8f9e846cbf7ab4a097e06d7aeb1)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
/* Class id for PetscPartitioner objects; 0 until assigned — presumably during package registration (TODO confirm where it is set) */
PetscClassId PETSCPARTITIONER_CLASSID = 0;

/* Registry of available PetscPartitioner implementations (name -> creation routine) */
PetscFunctionList PetscPartitionerList              = NULL;
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;

/* Flag used with PetscCitationsRegister()-style one-shot citation registration for Chaco — TODO confirm against callers */
PetscBool ChacoPartitionercite = PETSC_FALSE;
9 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
10                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
11                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
12                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
13                                "  isbn      = {0-89791-816-9},\n"
14                                "  pages     = {28},\n"
15                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
16                                "  publisher = {ACM Press},\n"
17                                "  address   = {New York},\n"
18                                "  year      = {1995}\n}\n";
19 
/* Flag for one-shot registration of the ParMetis citation — presumably consumed by PetscCitationsRegister(); TODO confirm against callers */
PetscBool ParMetisPartitionercite = PETSC_FALSE;
/* BibTeX entry emitted for the ParMetis partitioner */
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";
28 
29 #undef __FUNCT__
30 #define __FUNCT__ "DMPlexCreatePartitionerGraph"
31 /*@C
32   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
33 
34   Input Parameters:
+ dm      - The mesh DM
- height  - Height of the strata from which to construct the graph
37 
  Output Parameters:
+ numVertices - Number of vertices in the graph
. offsets     - Point offsets in the graph
- adjacency   - Point connectivity in the graph
42 
43   The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
44   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
45   representation on the mesh.
46 
47   Level: developer
48 
49 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
50 @*/
51 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
52 {
53   PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
54   PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
55   IS             cellNumbering;
56   const PetscInt *cellNum;
57   PetscBool      useCone, useClosure;
58   PetscSection   section;
59   PetscSegBuffer adjBuffer;
60   PetscSF        sfPoint;
61   PetscMPIInt    rank;
62   PetscErrorCode ierr;
63 
64   PetscFunctionBegin;
65   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
66   ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
67   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
68   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
69   /* Build adjacency graph via a section/segbuffer */
70   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
71   ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
72   ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
73   /* Always use FVM adjacency to create partitioner graph */
74   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
75   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
76   ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
77   ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
78   if (nroots > 0) {
79     ierr = DMPlexGetCellNumbering(dm, &cellNumbering);CHKERRQ(ierr);
80     ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
81   }
82   for (*numVertices = 0, p = pStart; p < pEnd; p++) {
83     /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
84     if (nroots > 0) {if (cellNum[p] < 0) continue;}
85     adjSize = PETSC_DETERMINE;
86     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
87     for (a = 0; a < adjSize; ++a) {
88       const PetscInt point = adj[a];
89       if (point != p && pStart <= point && point < pEnd) {
90         PetscInt *PETSC_RESTRICT pBuf;
91         ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
92         ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
93         *pBuf = point;
94       }
95     }
96     (*numVertices)++;
97   }
98   ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
99   ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
100   /* Derive CSR graph from section/segbuffer */
101   ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
102   ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
103   ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
104   for (idx = 0, p = pStart; p < pEnd; p++) {
105     if (nroots > 0) {if (cellNum[p] < 0) continue;}
106     ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
107   }
108   vOffsets[*numVertices] = size;
109   if (offsets) *offsets = vOffsets;
110   ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
111   if (nroots > 0) {
112     ISLocalToGlobalMapping ltogCells;
113     PetscInt n, size, *cells_arr;
114     /* In parallel, apply a global cell numbering to the graph */
115     ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
116     ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
117     ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
118     ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
119     /* Convert to positive global cell numbers */
120     for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
121     ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
122     ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);CHKERRQ(ierr);
123     ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
124   }
125   if (adjacency) *adjacency = graph;
126   /* Clean up */
127   ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
128   ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
129   ierr = PetscFree(adj);CHKERRQ(ierr);
130   PetscFunctionReturn(0);
131 }
132 
133 #undef __FUNCT__
134 #define __FUNCT__ "DMPlexCreateNeighborCSR"
135 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
136 {
137   const PetscInt maxFaceCases = 30;
138   PetscInt       numFaceCases = 0;
139   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
140   PetscInt      *off, *adj;
141   PetscInt      *neighborCells = NULL;
142   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
143   PetscErrorCode ierr;
144 
145   PetscFunctionBegin;
146   /* For parallel partitioning, I think you have to communicate supports */
147   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
148   cellDim = dim - cellHeight;
149   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
150   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
151   if (cEnd - cStart == 0) {
152     if (numVertices) *numVertices = 0;
153     if (offsets)   *offsets   = NULL;
154     if (adjacency) *adjacency = NULL;
155     PetscFunctionReturn(0);
156   }
157   numCells  = cEnd - cStart;
158   faceDepth = depth - cellHeight;
159   if (dim == depth) {
160     PetscInt f, fStart, fEnd;
161 
162     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
163     /* Count neighboring cells */
164     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
165     for (f = fStart; f < fEnd; ++f) {
166       const PetscInt *support;
167       PetscInt        supportSize;
168       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
169       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
170       if (supportSize == 2) {
171         PetscInt numChildren;
172 
173         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
174         if (!numChildren) {
175           ++off[support[0]-cStart+1];
176           ++off[support[1]-cStart+1];
177         }
178       }
179     }
180     /* Prefix sum */
181     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
182     if (adjacency) {
183       PetscInt *tmp;
184 
185       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
186       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
187       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
188       /* Get neighboring cells */
189       for (f = fStart; f < fEnd; ++f) {
190         const PetscInt *support;
191         PetscInt        supportSize;
192         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
193         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
194         if (supportSize == 2) {
195           PetscInt numChildren;
196 
197           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
198           if (!numChildren) {
199             adj[tmp[support[0]-cStart]++] = support[1];
200             adj[tmp[support[1]-cStart]++] = support[0];
201           }
202         }
203       }
204 #if defined(PETSC_USE_DEBUG)
205       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
206 #endif
207       ierr = PetscFree(tmp);CHKERRQ(ierr);
208     }
209     if (numVertices) *numVertices = numCells;
210     if (offsets)   *offsets   = off;
211     if (adjacency) *adjacency = adj;
212     PetscFunctionReturn(0);
213   }
214   /* Setup face recognition */
215   if (faceDepth == 1) {
216     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
217 
218     for (c = cStart; c < cEnd; ++c) {
219       PetscInt corners;
220 
221       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
222       if (!cornersSeen[corners]) {
223         PetscInt nFV;
224 
225         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
226         cornersSeen[corners] = 1;
227 
228         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
229 
230         numFaceVertices[numFaceCases++] = nFV;
231       }
232     }
233   }
234   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
235   /* Count neighboring cells */
236   for (cell = cStart; cell < cEnd; ++cell) {
237     PetscInt numNeighbors = PETSC_DETERMINE, n;
238 
239     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
240     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
241     for (n = 0; n < numNeighbors; ++n) {
242       PetscInt        cellPair[2];
243       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
244       PetscInt        meetSize = 0;
245       const PetscInt *meet    = NULL;
246 
247       cellPair[0] = cell; cellPair[1] = neighborCells[n];
248       if (cellPair[0] == cellPair[1]) continue;
249       if (!found) {
250         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
251         if (meetSize) {
252           PetscInt f;
253 
254           for (f = 0; f < numFaceCases; ++f) {
255             if (numFaceVertices[f] == meetSize) {
256               found = PETSC_TRUE;
257               break;
258             }
259           }
260         }
261         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
262       }
263       if (found) ++off[cell-cStart+1];
264     }
265   }
266   /* Prefix sum */
267   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
268 
269   if (adjacency) {
270     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
271     /* Get neighboring cells */
272     for (cell = cStart; cell < cEnd; ++cell) {
273       PetscInt numNeighbors = PETSC_DETERMINE, n;
274       PetscInt cellOffset   = 0;
275 
276       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
277       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
278       for (n = 0; n < numNeighbors; ++n) {
279         PetscInt        cellPair[2];
280         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
281         PetscInt        meetSize = 0;
282         const PetscInt *meet    = NULL;
283 
284         cellPair[0] = cell; cellPair[1] = neighborCells[n];
285         if (cellPair[0] == cellPair[1]) continue;
286         if (!found) {
287           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
288           if (meetSize) {
289             PetscInt f;
290 
291             for (f = 0; f < numFaceCases; ++f) {
292               if (numFaceVertices[f] == meetSize) {
293                 found = PETSC_TRUE;
294                 break;
295               }
296             }
297           }
298           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
299         }
300         if (found) {
301           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
302           ++cellOffset;
303         }
304       }
305     }
306   }
307   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
308   if (numVertices) *numVertices = numCells;
309   if (offsets)   *offsets   = off;
310   if (adjacency) *adjacency = adj;
311   PetscFunctionReturn(0);
312 }
313 
314 #undef __FUNCT__
315 #define __FUNCT__ "PetscPartitionerRegister"
316 /*@C
317   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
318 
319   Not Collective
320 
321   Input Parameters:
322 + name        - The name of a new user-defined creation routine
323 - create_func - The creation routine itself
324 
325   Notes:
326   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
327 
328   Sample usage:
329 .vb
330     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
331 .ve
332 
333   Then, your PetscPartitioner type can be chosen with the procedural interface via
334 .vb
335     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
336     PetscPartitionerSetType(PetscPartitioner, "my_part");
337 .ve
338    or at runtime via the option
339 .vb
340     -petscpartitioner_type my_part
341 .ve
342 
343   Level: advanced
344 
345 .keywords: PetscPartitioner, register
346 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
347 
348 @*/
349 PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
350 {
351   PetscErrorCode ierr;
352 
353   PetscFunctionBegin;
354   ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
355   PetscFunctionReturn(0);
356 }
357 
358 #undef __FUNCT__
359 #define __FUNCT__ "PetscPartitionerSetType"
360 /*@C
361   PetscPartitionerSetType - Builds a particular PetscPartitioner
362 
363   Collective on PetscPartitioner
364 
365   Input Parameters:
366 + part - The PetscPartitioner object
367 - name - The kind of partitioner
368 
369   Options Database Key:
370 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
371 
372   Level: intermediate
373 
374 .keywords: PetscPartitioner, set, type
375 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
376 @*/
377 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
378 {
379   PetscErrorCode (*r)(PetscPartitioner);
380   PetscBool      match;
381   PetscErrorCode ierr;
382 
383   PetscFunctionBegin;
384   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
385   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
386   if (match) PetscFunctionReturn(0);
387 
388   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
389   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
390   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
391 
392   if (part->ops->destroy) {
393     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
394     part->ops->destroy = NULL;
395   }
396   ierr = (*r)(part);CHKERRQ(ierr);
397   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
398   PetscFunctionReturn(0);
399 }
400 
401 #undef __FUNCT__
402 #define __FUNCT__ "PetscPartitionerGetType"
403 /*@C
404   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
405 
406   Not Collective
407 
408   Input Parameter:
409 . part - The PetscPartitioner
410 
411   Output Parameter:
412 . name - The PetscPartitioner type name
413 
414   Level: intermediate
415 
416 .keywords: PetscPartitioner, get, type, name
417 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
418 @*/
419 PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
420 {
421   PetscErrorCode ierr;
422 
423   PetscFunctionBegin;
424   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
425   PetscValidPointer(name, 2);
426   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
427   *name = ((PetscObject) part)->type_name;
428   PetscFunctionReturn(0);
429 }
430 
431 #undef __FUNCT__
432 #define __FUNCT__ "PetscPartitionerView"
433 /*@C
434   PetscPartitionerView - Views a PetscPartitioner
435 
436   Collective on PetscPartitioner
437 
  Input Parameters:
+ part - the PetscPartitioner object to view
- v    - the viewer
441 
442   Level: developer
443 
444 .seealso: PetscPartitionerDestroy()
445 @*/
446 PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
447 {
448   PetscErrorCode ierr;
449 
450   PetscFunctionBegin;
451   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
452   if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
453   if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
454   PetscFunctionReturn(0);
455 }
456 
457 #undef __FUNCT__
458 #define __FUNCT__ "PetscPartitionerViewFromOptions"
459 /*
460   PetscPartitionerViewFromOptions - Processes command line options to determine if/how a PetscPartitioner is to be viewed.
461 
462   Collective on PetscPartitioner
463 
464   Input Parameters:
465 + part   - the PetscPartitioner
466 . prefix - prefix to use for viewing, or NULL
467 - optionname - option to activate viewing
468 
469   Level: intermediate
470 
471 .keywords: PetscPartitioner, view, options, database
472 .seealso: VecViewFromOptions(), MatViewFromOptions()
473 */
474 PetscErrorCode PetscPartitionerViewFromOptions(PetscPartitioner part, const char prefix[], const char optionname[])
475 {
476   PetscViewer       viewer;
477   PetscViewerFormat format;
478   PetscBool         flg;
479   PetscErrorCode    ierr;
480 
481   PetscFunctionBegin;
482   if (prefix) {ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject) part), prefix,                       optionname, &viewer, &format, &flg);CHKERRQ(ierr);}
483   else        {ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject) part), ((PetscObject) part)->prefix, optionname, &viewer, &format, &flg);CHKERRQ(ierr);}
484   if (flg) {
485     ierr = PetscViewerPushFormat(viewer, format);CHKERRQ(ierr);
486     ierr = PetscPartitionerView(part, viewer);CHKERRQ(ierr);
487     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
488     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
489   }
490   PetscFunctionReturn(0);
491 }
492 #undef __FUNCT__
493 #define __FUNCT__ "PetscPartitionerSetTypeFromOptions_Internal"
494 PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
495 {
496   const char    *defaultType;
497   char           name[256];
498   PetscBool      flg;
499   PetscErrorCode ierr;
500 
501   PetscFunctionBegin;
502   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
503   if (!((PetscObject) part)->type_name) defaultType = PETSCPARTITIONERCHACO;
504   else                                  defaultType = ((PetscObject) part)->type_name;
505   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
506 
507   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
508   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);CHKERRQ(ierr);
509   if (flg) {
510     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
511   } else if (!((PetscObject) part)->type_name) {
512     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
513   }
514   ierr = PetscOptionsEnd();CHKERRQ(ierr);
515   PetscFunctionReturn(0);
516 }
517 
518 #undef __FUNCT__
519 #define __FUNCT__ "PetscPartitionerSetFromOptions"
520 /*@
521   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
522 
523   Collective on PetscPartitioner
524 
525   Input Parameter:
526 . part - the PetscPartitioner object to set options for
527 
528   Level: developer
529 
530 .seealso: PetscPartitionerView()
531 @*/
532 PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
533 {
534   PetscErrorCode ierr;
535 
536   PetscFunctionBegin;
537   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
538   ierr = PetscPartitionerSetTypeFromOptions_Internal(part);CHKERRQ(ierr);
539 
540   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
541   if (part->ops->setfromoptions) {ierr = (*part->ops->setfromoptions)(part);CHKERRQ(ierr);}
542   /* process any options handlers added with PetscObjectAddOptionsHandler() */
543   ierr = PetscObjectProcessOptionsHandlers((PetscObject) part);CHKERRQ(ierr);
544   ierr = PetscOptionsEnd();CHKERRQ(ierr);
545   ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
546   PetscFunctionReturn(0);
547 }
548 
549 #undef __FUNCT__
550 #define __FUNCT__ "PetscPartitionerSetUp"
551 /*@C
552   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
553 
554   Collective on PetscPartitioner
555 
556   Input Parameter:
557 . part - the PetscPartitioner object to setup
558 
559   Level: developer
560 
561 .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
562 @*/
563 PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
564 {
565   PetscErrorCode ierr;
566 
567   PetscFunctionBegin;
568   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
569   if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
570   PetscFunctionReturn(0);
571 }
572 
573 #undef __FUNCT__
574 #define __FUNCT__ "PetscPartitionerDestroy"
575 /*@
576   PetscPartitionerDestroy - Destroys a PetscPartitioner object
577 
578   Collective on PetscPartitioner
579 
580   Input Parameter:
581 . part - the PetscPartitioner object to destroy
582 
583   Level: developer
584 
585 .seealso: PetscPartitionerView()
586 @*/
587 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
588 {
589   PetscErrorCode ierr;
590 
591   PetscFunctionBegin;
592   if (!*part) PetscFunctionReturn(0);
593   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
594 
595   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
596   ((PetscObject) (*part))->refct = 0;
597 
598   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
599   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
600   PetscFunctionReturn(0);
601 }
602 
603 #undef __FUNCT__
604 #define __FUNCT__ "PetscPartitionerCreate"
605 /*@
606   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
607 
608   Collective on MPI_Comm
609 
610   Input Parameter:
611 . comm - The communicator for the PetscPartitioner object
612 
613   Output Parameter:
614 . part - The PetscPartitioner object
615 
616   Level: beginner
617 
618 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE
619 @*/
620 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
621 {
622   PetscPartitioner p;
623   PetscErrorCode   ierr;
624 
625   PetscFunctionBegin;
626   PetscValidPointer(part, 2);
627   *part = NULL;
628   ierr = PetscFVInitializePackage();CHKERRQ(ierr);
629 
630   ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
631 
632   *part = p;
633   PetscFunctionReturn(0);
634 }
635 
636 #undef __FUNCT__
637 #define __FUNCT__ "PetscPartitionerPartition"
638 /*@
639   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
640 
641   Collective on DM
642 
643   Input Parameters:
644 + part    - The PetscPartitioner
645 - dm      - The mesh DM
646 
647   Output Parameters:
648 + partSection     - The PetscSection giving the division of points by partition
649 - partition       - The list of points by partition
650 
651   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
652 
653   Level: developer
654 
655 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
656 @*/
657 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
658 {
659   PetscMPIInt    size;
660   PetscErrorCode ierr;
661 
662   PetscFunctionBegin;
663   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
664   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
665   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
666   PetscValidPointer(partition, 5);
667   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
668   if (size == 1) {
669     PetscInt *points;
670     PetscInt  cStart, cEnd, c;
671 
672     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
673     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
674     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
675     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
676     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
677     for (c = cStart; c < cEnd; ++c) points[c] = c;
678     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
679     PetscFunctionReturn(0);
680   }
681   if (part->height == 0) {
682     PetscInt  numVertices;
683     PetscInt *start     = NULL;
684     PetscInt *adjacency = NULL;
685 
686     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency);CHKERRQ(ierr);
687     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
688     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
689     ierr = PetscFree(start);CHKERRQ(ierr);
690     ierr = PetscFree(adjacency);CHKERRQ(ierr);
691   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
692   PetscFunctionReturn(0);
693 }
694 
695 #undef __FUNCT__
696 #define __FUNCT__ "PetscPartitionerDestroy_Shell"
697 PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
698 {
699   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
700   PetscErrorCode          ierr;
701 
702   PetscFunctionBegin;
703   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
704   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
705   ierr = PetscFree(p);CHKERRQ(ierr);
706   PetscFunctionReturn(0);
707 }
708 
709 #undef __FUNCT__
710 #define __FUNCT__ "PetscPartitionerView_Shell_Ascii"
711 PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
712 {
713   PetscViewerFormat format;
714   PetscErrorCode    ierr;
715 
716   PetscFunctionBegin;
717   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
718   ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
719   PetscFunctionReturn(0);
720 }
721 
722 #undef __FUNCT__
723 #define __FUNCT__ "PetscPartitionerView_Shell"
724 PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
725 {
726   PetscBool      iascii;
727   PetscErrorCode ierr;
728 
729   PetscFunctionBegin;
730   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
731   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
732   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
733   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
734   PetscFunctionReturn(0);
735 }
736 
737 #undef __FUNCT__
738 #define __FUNCT__ "PetscPartitionerPartition_Shell"
739 PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
740 {
741   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
742   PetscInt                np;
743   PetscErrorCode          ierr;
744 
745   PetscFunctionBegin;
746   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
747   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
748   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
749   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
750   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
751   *partition = p->partition;
752   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
753   PetscFunctionReturn(0);
754 }
755 
756 #undef __FUNCT__
757 #define __FUNCT__ "PetscPartitionerInitialize_Shell"
758 PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
759 {
760   PetscFunctionBegin;
761   part->ops->view      = PetscPartitionerView_Shell;
762   part->ops->destroy   = PetscPartitionerDestroy_Shell;
763   part->ops->partition = PetscPartitionerPartition_Shell;
764   PetscFunctionReturn(0);
765 }
766 
767 /*MC
768   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
769 
770   Level: intermediate
771 
772 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
773 M*/
774 
775 #undef __FUNCT__
776 #define __FUNCT__ "PetscPartitionerCreate_Shell"
777 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
778 {
779   PetscPartitioner_Shell *p;
780   PetscErrorCode          ierr;
781 
782   PetscFunctionBegin;
783   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
784   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
785   part->data = p;
786 
787   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
788   PetscFunctionReturn(0);
789 }
790 
791 #undef __FUNCT__
792 #define __FUNCT__ "PetscPartitionerShellSetPartition"
/*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh

  Collective on PART

  Input Parameters:
+ part     - The PetscPartitioner
. numProcs - The number of partitions
. sizes    - array of size numProcs (or NULL) providing the number of points in each partition
- points   - array of size sum(sizes) (may be NULL iff sizes is NULL) providing the partition each point belongs to

  Level: developer

  Notes:

    It is safe to free the sizes and points arrays after use in this routine.

.seealso DMPlexDistribute(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt numProcs, const PetscInt sizes[], const PetscInt points[])
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscInt                proc, numPoints;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* points is only required (and only validated) when sizes is given */
  if (sizes) {PetscValidPointer(sizes, 3);}
  if (sizes) {PetscValidPointer(points, 4);}
  /* Discard any previously configured partition before installing the new one */
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  /* Section maps partition index -> number of points in that partition */
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(p->section, 0, numProcs);CHKERRQ(ierr);
  if (sizes) {
    for (proc = 0; proc < numProcs; ++proc) {
      ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
    }
  }
  ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
  /* Copy the caller's points so the input arrays may be freed afterwards */
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
836 
837 #undef __FUNCT__
838 #define __FUNCT__ "PetscPartitionerDestroy_Simple"
839 PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
840 {
841   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
842   PetscErrorCode          ierr;
843 
844   PetscFunctionBegin;
845   ierr = PetscFree(p);CHKERRQ(ierr);
846   PetscFunctionReturn(0);
847 }
848 
849 #undef __FUNCT__
850 #define __FUNCT__ "PetscPartitionerView_Simple_Ascii"
851 PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
852 {
853   PetscViewerFormat format;
854   PetscErrorCode    ierr;
855 
856   PetscFunctionBegin;
857   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
858   ierr = PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");CHKERRQ(ierr);
859   PetscFunctionReturn(0);
860 }
861 
862 #undef __FUNCT__
863 #define __FUNCT__ "PetscPartitionerView_Simple"
864 PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
865 {
866   PetscBool      iascii;
867   PetscErrorCode ierr;
868 
869   PetscFunctionBegin;
870   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
871   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
872   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
873   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
874   PetscFunctionReturn(0);
875 }
876 
877 #undef __FUNCT__
878 #define __FUNCT__ "PetscPartitionerPartition_Simple"
879 PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
880 {
881   PetscInt       np;
882   PetscErrorCode ierr;
883 
884   PetscFunctionBegin;
885   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
886   for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
887   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
888   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
889   PetscFunctionReturn(0);
890 }
891 
892 #undef __FUNCT__
893 #define __FUNCT__ "PetscPartitionerInitialize_Simple"
894 PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
895 {
896   PetscFunctionBegin;
897   part->ops->view      = PetscPartitionerView_Simple;
898   part->ops->destroy   = PetscPartitionerDestroy_Simple;
899   part->ops->partition = PetscPartitionerPartition_Simple;
900   PetscFunctionReturn(0);
901 }
902 
903 /*MC
904   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
905 
906   Level: intermediate
907 
908 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
909 M*/
910 
911 #undef __FUNCT__
912 #define __FUNCT__ "PetscPartitionerCreate_Simple"
913 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
914 {
915   PetscPartitioner_Simple *p;
916   PetscErrorCode           ierr;
917 
918   PetscFunctionBegin;
919   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
920   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
921   part->data = p;
922 
923   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
924   PetscFunctionReturn(0);
925 }
926 
927 #undef __FUNCT__
928 #define __FUNCT__ "PetscPartitionerDestroy_Chaco"
929 PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
930 {
931   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
932   PetscErrorCode          ierr;
933 
934   PetscFunctionBegin;
935   ierr = PetscFree(p);CHKERRQ(ierr);
936   PetscFunctionReturn(0);
937 }
938 
939 #undef __FUNCT__
940 #define __FUNCT__ "PetscPartitionerView_Chaco_Ascii"
941 PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
942 {
943   PetscViewerFormat format;
944   PetscErrorCode    ierr;
945 
946   PetscFunctionBegin;
947   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
948   ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
949   PetscFunctionReturn(0);
950 }
951 
952 #undef __FUNCT__
953 #define __FUNCT__ "PetscPartitionerView_Chaco"
954 PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
955 {
956   PetscBool      iascii;
957   PetscErrorCode ierr;
958 
959   PetscFunctionBegin;
960   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
961   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
962   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
963   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
964   PetscFunctionReturn(0);
965 }
966 
967 #if defined(PETSC_HAVE_CHACO)
968 #if defined(PETSC_HAVE_UNISTD_H)
969 #include <unistd.h>
970 #endif
971 /* Chaco does not have an include file */
972 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
973                        float *ewgts, float *x, float *y, float *z, char *outassignname,
974                        char *outfilename, short *assignment, int architecture, int ndims_tot,
975                        int mesh_dims[3], double *goal, int global_method, int local_method,
976                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
977 
978 extern int FREE_GRAPH;
979 #endif
980 
981 #undef __FUNCT__
982 #define __FUNCT__ "PetscPartitionerPartition_Chaco"
/* Partition the serial graph (start/adjacency in CSR form) with the Chaco
   library and convert the resulting assignment into a PetscSection (sizes per
   partition) plus an IS (points ordered by partition).  The adjacency array is
   temporarily shifted to 1-based numbering for Chaco and restored on exit.
   Chaco's stdout chatter is captured through a pipe so it can be reported only
   on error. */
PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
  short int     *assignment;              /* Output partition */
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  /* Empty local graph: emit an empty section and partition without calling Chaco */
  if (!numVertices) {
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco expects 1-based vertex numbering; shift the shared array (undone below) */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  /* 1-D processor mesh of nparts processors */
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    /* Save the real stdout and point fd 1 at the pipe's write end */
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    /* Drain Chaco's captured output, then restore the original stdout */
    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    /* ierr here is Chaco's return code; report its log on failure */
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* Gather vertices partition by partition so the IS is ordered by target part */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Restore the caller's 0-based adjacency numbering */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1086 
1087 #undef __FUNCT__
1088 #define __FUNCT__ "PetscPartitionerInitialize_Chaco"
1089 PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1090 {
1091   PetscFunctionBegin;
1092   part->ops->view      = PetscPartitionerView_Chaco;
1093   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1094   part->ops->partition = PetscPartitionerPartition_Chaco;
1095   PetscFunctionReturn(0);
1096 }
1097 
1098 /*MC
1099   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1100 
1101   Level: intermediate
1102 
1103 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1104 M*/
1105 
1106 #undef __FUNCT__
1107 #define __FUNCT__ "PetscPartitionerCreate_Chaco"
1108 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1109 {
1110   PetscPartitioner_Chaco *p;
1111   PetscErrorCode          ierr;
1112 
1113   PetscFunctionBegin;
1114   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1115   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1116   part->data = p;
1117 
1118   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1119   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1120   PetscFunctionReturn(0);
1121 }
1122 
1123 #undef __FUNCT__
1124 #define __FUNCT__ "PetscPartitionerDestroy_ParMetis"
1125 PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1126 {
1127   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1128   PetscErrorCode             ierr;
1129 
1130   PetscFunctionBegin;
1131   ierr = PetscFree(p);CHKERRQ(ierr);
1132   PetscFunctionReturn(0);
1133 }
1134 
1135 #undef __FUNCT__
1136 #define __FUNCT__ "PetscPartitionerView_ParMetis_Ascii"
1137 PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1138 {
1139   PetscViewerFormat format;
1140   PetscErrorCode    ierr;
1141 
1142   PetscFunctionBegin;
1143   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1144   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
1145   PetscFunctionReturn(0);
1146 }
1147 
1148 #undef __FUNCT__
1149 #define __FUNCT__ "PetscPartitionerView_ParMetis"
1150 PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1151 {
1152   PetscBool      iascii;
1153   PetscErrorCode ierr;
1154 
1155   PetscFunctionBegin;
1156   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1157   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1158   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1159   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1160   PetscFunctionReturn(0);
1161 }
1162 
1163 #if defined(PETSC_HAVE_PARMETIS)
1164 #include <parmetis.h>
1165 #endif
1166 
1167 #undef __FUNCT__
1168 #define __FUNCT__ "PetscPartitionerPartition_ParMetis"
1169 PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1170 {
1171 #if defined(PETSC_HAVE_PARMETIS)
1172   MPI_Comm       comm;
1173   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1174   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1175   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1176   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1177   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1178   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1179   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1180   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1181   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1182   PetscReal     *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1183   PetscReal     *ubvec;                    /* The balance intolerance for vertex weights */
1184   PetscInt       options[5];               /* Options */
1185   /* Outputs */
1186   PetscInt       edgeCut;                  /* The number of edges cut by the partition */
1187   PetscInt      *assignment, *points;
1188   PetscMPIInt    rank, p, v, i;
1189   PetscErrorCode ierr;
1190 
1191   PetscFunctionBegin;
1192   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1193   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1194   options[0] = 0; /* Use all defaults */
1195   /* Calculate vertex distribution */
1196   ierr = PetscMalloc4(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment);CHKERRQ(ierr);
1197   vtxdist[0] = 0;
1198   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1199   for (p = 2; p <= nparts; ++p) {
1200     vtxdist[p] += vtxdist[p-1];
1201   }
1202   /* Calculate weights */
1203   for (p = 0; p < nparts; ++p) {
1204     tpwgts[p] = 1.0/nparts;
1205   }
1206   ubvec[0] = 1.05;
1207 
1208   if (nparts == 1) {
1209     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));
1210   } else {
1211     if (vtxdist[1] == vtxdist[nparts]) {
1212       if (!rank) {
1213         PetscStackPush("METIS_PartGraphKway");
1214         ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
1215         PetscStackPop;
1216         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1217       }
1218     } else {
1219       PetscStackPush("ParMETIS_V3_PartKway");
1220       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1221       PetscStackPop;
1222       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
1223     }
1224   }
1225   /* Convert to PetscSection+IS */
1226   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1227   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1228   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1229   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1230   for (p = 0, i = 0; p < nparts; ++p) {
1231     for (v = 0; v < nvtxs; ++v) {
1232       if (assignment[v] == p) points[i++] = v;
1233     }
1234   }
1235   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1236   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1237   ierr = PetscFree4(vtxdist,tpwgts,ubvec,assignment);CHKERRQ(ierr);
1238   PetscFunctionReturn(0);
1239 #else
1240   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1241 #endif
1242 }
1243 
1244 #undef __FUNCT__
1245 #define __FUNCT__ "PetscPartitionerInitialize_ParMetis"
1246 PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1247 {
1248   PetscFunctionBegin;
1249   part->ops->view      = PetscPartitionerView_ParMetis;
1250   part->ops->destroy   = PetscPartitionerDestroy_ParMetis;
1251   part->ops->partition = PetscPartitionerPartition_ParMetis;
1252   PetscFunctionReturn(0);
1253 }
1254 
1255 /*MC
1256   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1257 
1258   Level: intermediate
1259 
1260 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1261 M*/
1262 
1263 #undef __FUNCT__
1264 #define __FUNCT__ "PetscPartitionerCreate_ParMetis"
1265 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1266 {
1267   PetscPartitioner_ParMetis *p;
1268   PetscErrorCode          ierr;
1269 
1270   PetscFunctionBegin;
1271   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1272   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1273   part->data = p;
1274 
1275   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1276   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1277   PetscFunctionReturn(0);
1278 }
1279 
1280 #undef __FUNCT__
1281 #define __FUNCT__ "DMPlexGetPartitioner"
1282 /*@
1283   DMPlexGetPartitioner - Get the mesh partitioner
1284 
1285   Not collective
1286 
1287   Input Parameter:
1288 . dm - The DM
1289 
1290   Output Parameter:
1291 . part - The PetscPartitioner
1292 
1293   Level: developer
1294 
1295   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1296 
1297 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1298 @*/
1299 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1300 {
1301   DM_Plex *mesh = (DM_Plex *) dm->data;
1302 
1303   PetscFunctionBegin;
1304   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1305   PetscValidPointer(part, 2);
1306   *part = mesh->partitioner;
1307   PetscFunctionReturn(0);
1308 }
1309 
1310 #undef __FUNCT__
1311 #define __FUNCT__ "DMPlexSetPartitioner"
/*@
  DMPlexSetPartitioner - Set the mesh partitioner

  logically collective on dm and part

  Input Parameters:
+ dm - The DM
- part - The partitioner

  Level: developer

  Note: Any existing PetscPartitioner will be destroyed.

.seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
@*/
PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
{
  DM_Plex       *mesh = (DM_Plex *) dm->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
  /* Take the new reference BEFORE destroying the old one, so that
     setting the same partitioner that is already installed is safe */
  ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
  ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
  mesh->partitioner = part;
  PetscFunctionReturn(0);
}
1340 
1341 #undef __FUNCT__
1342 #define __FUNCT__ "DMPlexPartitionLabelClosure_Tree"
1343 static PetscErrorCode DMPlexPartitionLabelClosure_Tree(DM dm, DMLabel label, PetscInt rank, PetscInt point)
1344 {
1345   PetscInt       parent, closureSize, *closure = NULL, i, pStart, pEnd;
1346   PetscErrorCode ierr;
1347 
1348   PetscFunctionBegin;
1349   ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
1350   if (parent == point) PetscFunctionReturn(0);
1351   ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr);
1352   ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
1353   for (i = 0; i < closureSize; i++) {
1354     PetscInt cpoint = closure[2*i];
1355 
1356     ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
1357     ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint);CHKERRQ(ierr);
1358   }
1359   ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
1360   PetscFunctionReturn(0);
1361 }
1362 
1363 #undef __FUNCT__
1364 #define __FUNCT__ "DMPlexPartitionLabelClosure"
/*@
  DMPlexPartitionLabelClosure - Add the closure of all points to the partition label

  Input Parameters:
+ dm     - The DM
- label  - DMLabel assigning ranks to remote roots

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, c, closureSize;
  PetscInt       *closure = NULL;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* Iterate over every rank value currently present in the label */
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];

    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* The closure work array is reused across calls; it is restored once at the end */
      ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      for (c = 0; c < closureSize*2; c += 2) { /* entries are (point, orientation) pairs */
        ierr = DMLabelSetValue(label, closure[c], rank);CHKERRQ(ierr);
        /* Also pull in tree (constraint) ancestors of each closure point */
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,closure[c]);CHKERRQ(ierr);
      }
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  /* Return the reused closure work array (the point argument is ignored on restore) */
  if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);}
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1409 
1410 #undef __FUNCT__
1411 #define __FUNCT__ "DMPlexPartitionLabelAdjacency"
/*@
  DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label

  Input Parameters:
+ dm     - The DM
- label  - DMLabel assigning ranks to remote roots

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, a, adjSize;
  PetscInt       *adj = NULL;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* Iterate over every rank value currently present in the label */
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];

    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* PETSC_DETERMINE lets DMPlexGetAdjacency size (and reuse) the adj array */
      adjSize = PETSC_DETERMINE;
      ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
      for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  /* Free the adjacency work array allocated by DMPlexGetAdjacency */
  ierr = PetscFree(adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1454 
1455 #undef __FUNCT__
1456 #define __FUNCT__ "DMPlexPartitionLabelInvert"
/*@
  DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label

  Input Parameters:
+ dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots

  Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
  resulting leafLabel is a receiver mapping of remote roots to their parent rank.

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, numProcs;
  PetscInt           p, n, numNeighbors, size, l, nleaves;
  PetscSF            sfPoint;
  PetscSFNode       *rootPoints, *leafPoints;
  PetscSection       rootSection, leafSection;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners */
  /* rootSection counts, per destination rank, how many points we will send it */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, numProcs);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(size, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* If the point is a leaf of the point SF, record its true (remote) owner;
         otherwise the point is owned locally and we record (point, rank) */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Filter remote contributions (ovLeafPoints) into the overlapSF */
  ierr = PetscSectionGetStorageSize(leafSection, &size);CHKERRQ(ierr);
  for (p = 0; p < size; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1544 
1545 #undef __FUNCT__
1546 #define __FUNCT__ "DMPlexPartitionLabelCreateSF"
1547 /*@
1548   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
1549 
1550   Input Parameters:
1551 + dm    - The DM
1552 . label - DMLabel assigning ranks to remote roots
1553 
1554   Output Parameter:
1555 - sf    - The star forest communication context encapsulating the defined mapping
1556 
1557   Note: The incoming label is a receiver mapping of remote points to their parent rank.
1558 
1559   Level: developer
1560 
1561 .seealso: DMPlexDistribute(), DMPlexCreateOverlap()
1562 @*/
1563 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
1564 {
1565   PetscMPIInt     rank, numProcs;
1566   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
1567   PetscSFNode    *remotePoints;
1568   IS              remoteRootIS;
1569   const PetscInt *remoteRoots;
1570   PetscErrorCode ierr;
1571 
1572   PetscFunctionBegin;
1573   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1574   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
1575 
1576   for (numRemote = 0, n = 0; n < numProcs; ++n) {
1577     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1578     numRemote += numPoints;
1579   }
1580   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
1581   /* Put owned points first */
1582   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
1583   if (numPoints > 0) {
1584     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
1585     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1586     for (p = 0; p < numPoints; p++) {
1587       remotePoints[idx].index = remoteRoots[p];
1588       remotePoints[idx].rank = rank;
1589       idx++;
1590     }
1591     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1592     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1593   }
1594   /* Now add remote points */
1595   for (n = 0; n < numProcs; ++n) {
1596     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1597     if (numPoints <= 0 || n == rank) continue;
1598     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
1599     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1600     for (p = 0; p < numPoints; p++) {
1601       remotePoints[idx].index = remoteRoots[p];
1602       remotePoints[idx].rank = n;
1603       idx++;
1604     }
1605     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1606     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1607   }
1608   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
1609   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1610   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1611   PetscFunctionReturn(0);
1612 }
1613