xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 6ec9520ff15c6c6c97b00a55e49c5b80be990a1d)
1 #include <petsc-private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
3 PetscClassId PETSCPARTITIONER_CLASSID = 0;
4 
5 PetscFunctionList PetscPartitionerList              = NULL;
6 PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
7 
8 PetscBool ChacoPartitionercite = PETSC_FALSE;
9 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
10                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
11                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
12                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
13                                "  isbn      = {0-89791-816-9},\n"
14                                "  pages     = {28},\n"
15                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
16                                "  publisher = {ACM Press},\n"
17                                "  address   = {New York},\n"
18                                "  year      = {1995}\n}\n";
19 
20 PetscBool ParMetisPartitionercite = PETSC_FALSE;
21 const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
22                                "  author  = {George Karypis and Vipin Kumar},\n"
23                                "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
24                                "  journal = {Journal of Parallel and Distributed Computing},\n"
25                                "  volume  = {48},\n"
26                                "  pages   = {71--85},\n"
27                                "  year    = {1998}\n}\n";
28 
29 #undef __FUNCT__
30 #define __FUNCT__ "DMPlexCreatePartitionerGraph"
31 /*@C
32   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
33 
34   Input Parameters:
35 + dm      - The mesh DM dm
36 - height  - Height of the strata from which to construct the graph
37 
38   Output Parameter:
39 + numVertices - Number of vertices in the graph
40 - offsets     - Point offsets in the graph
41 - adjacency   - Point connectivity in the graph
42 
43   The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
44   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
45   representation on the mesh.
46 
47   Level: developer
48 
49 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
50 @*/
51 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
52 {
53   PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
54   PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
55   IS             cellNumbering;
56   const PetscInt *cellNum;
57   PetscBool      useCone, useClosure;
58   PetscSection   section;
59   PetscSegBuffer adjBuffer;
60   PetscSF        sfPoint;
61   PetscMPIInt    rank;
62   PetscErrorCode ierr;
63 
64   PetscFunctionBegin;
65   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
66   ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
67   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
68   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
69   /* Build adjacency graph via a section/segbuffer */
70   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
71   ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
72   ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
73   /* Always use FVM adjacency to create partitioner graph */
74   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
75   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
76   ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
77   ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
78   if (nroots > 0) {
79     ierr = DMPlexGetCellNumbering(dm, &cellNumbering);CHKERRQ(ierr);
80     ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
81   }
82   for (*numVertices = 0, p = pStart; p < pEnd; p++) {
83     /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
84     if (nroots > 0) {if (cellNum[p] < 0) continue;}
85     adjSize = PETSC_DETERMINE;
86     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
87     for (a = 0; a < adjSize; ++a) {
88       const PetscInt point = adj[a];
89       if (point != p && pStart <= point && point < pEnd) {
90         PetscInt *PETSC_RESTRICT pBuf;
91         ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
92         ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
93         *pBuf = point;
94       }
95     }
96     (*numVertices)++;
97   }
98   ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
99   ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
100   /* Derive CSR graph from section/segbuffer */
101   ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
102   ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
103   ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
104   for (idx = 0, p = pStart; p < pEnd; p++) {
105     if (nroots > 0) {if (cellNum[p] < 0) continue;}
106     ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
107   }
108   vOffsets[*numVertices] = size;
109   if (offsets) *offsets = vOffsets;
110   ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
111   if (nroots > 0) {
112     ISLocalToGlobalMapping ltogCells;
113     PetscInt n, size, *cells_arr;
114     /* In parallel, apply a global cell numbering to the graph */
115     ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
116     ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
117     ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
118     ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
119     /* Convert to positive global cell numbers */
120     for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
121     ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
122     ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);CHKERRQ(ierr);
123     ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
124   }
125   if (adjacency) *adjacency = graph;
126   /* Clean up */
127   ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
128   ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
129   ierr = PetscFree(adj);CHKERRQ(ierr);
130   PetscFunctionReturn(0);
131 }
132 
133 #undef __FUNCT__
134 #define __FUNCT__ "DMPlexCreateNeighborCSR"
135 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
136 {
137   const PetscInt maxFaceCases = 30;
138   PetscInt       numFaceCases = 0;
139   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
140   PetscInt      *off, *adj;
141   PetscInt      *neighborCells = NULL;
142   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
143   PetscErrorCode ierr;
144 
145   PetscFunctionBegin;
146   /* For parallel partitioning, I think you have to communicate supports */
147   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
148   cellDim = dim - cellHeight;
149   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
150   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
151   if (cEnd - cStart == 0) {
152     if (numVertices) *numVertices = 0;
153     if (offsets)   *offsets   = NULL;
154     if (adjacency) *adjacency = NULL;
155     PetscFunctionReturn(0);
156   }
157   numCells  = cEnd - cStart;
158   faceDepth = depth - cellHeight;
159   if (dim == depth) {
160     PetscInt f, fStart, fEnd;
161 
162     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
163     /* Count neighboring cells */
164     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
165     for (f = fStart; f < fEnd; ++f) {
166       const PetscInt *support;
167       PetscInt        supportSize;
168       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
169       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
170       if (supportSize == 2) {
171         PetscInt numChildren;
172 
173         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
174         if (!numChildren) {
175           ++off[support[0]-cStart+1];
176           ++off[support[1]-cStart+1];
177         }
178       }
179     }
180     /* Prefix sum */
181     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
182     if (adjacency) {
183       PetscInt *tmp;
184 
185       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
186       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
187       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
188       /* Get neighboring cells */
189       for (f = fStart; f < fEnd; ++f) {
190         const PetscInt *support;
191         PetscInt        supportSize;
192         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
193         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
194         if (supportSize == 2) {
195           PetscInt numChildren;
196 
197           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
198           if (!numChildren) {
199             adj[tmp[support[0]-cStart]++] = support[1];
200             adj[tmp[support[1]-cStart]++] = support[0];
201           }
202         }
203       }
204 #if defined(PETSC_USE_DEBUG)
205       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
206 #endif
207       ierr = PetscFree(tmp);CHKERRQ(ierr);
208     }
209     if (numVertices) *numVertices = numCells;
210     if (offsets)   *offsets   = off;
211     if (adjacency) *adjacency = adj;
212     PetscFunctionReturn(0);
213   }
214   /* Setup face recognition */
215   if (faceDepth == 1) {
216     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
217 
218     for (c = cStart; c < cEnd; ++c) {
219       PetscInt corners;
220 
221       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
222       if (!cornersSeen[corners]) {
223         PetscInt nFV;
224 
225         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
226         cornersSeen[corners] = 1;
227 
228         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
229 
230         numFaceVertices[numFaceCases++] = nFV;
231       }
232     }
233   }
234   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
235   /* Count neighboring cells */
236   for (cell = cStart; cell < cEnd; ++cell) {
237     PetscInt numNeighbors = PETSC_DETERMINE, n;
238 
239     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
240     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
241     for (n = 0; n < numNeighbors; ++n) {
242       PetscInt        cellPair[2];
243       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
244       PetscInt        meetSize = 0;
245       const PetscInt *meet    = NULL;
246 
247       cellPair[0] = cell; cellPair[1] = neighborCells[n];
248       if (cellPair[0] == cellPair[1]) continue;
249       if (!found) {
250         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
251         if (meetSize) {
252           PetscInt f;
253 
254           for (f = 0; f < numFaceCases; ++f) {
255             if (numFaceVertices[f] == meetSize) {
256               found = PETSC_TRUE;
257               break;
258             }
259           }
260         }
261         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
262       }
263       if (found) ++off[cell-cStart+1];
264     }
265   }
266   /* Prefix sum */
267   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
268 
269   if (adjacency) {
270     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
271     /* Get neighboring cells */
272     for (cell = cStart; cell < cEnd; ++cell) {
273       PetscInt numNeighbors = PETSC_DETERMINE, n;
274       PetscInt cellOffset   = 0;
275 
276       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
277       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
278       for (n = 0; n < numNeighbors; ++n) {
279         PetscInt        cellPair[2];
280         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
281         PetscInt        meetSize = 0;
282         const PetscInt *meet    = NULL;
283 
284         cellPair[0] = cell; cellPair[1] = neighborCells[n];
285         if (cellPair[0] == cellPair[1]) continue;
286         if (!found) {
287           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
288           if (meetSize) {
289             PetscInt f;
290 
291             for (f = 0; f < numFaceCases; ++f) {
292               if (numFaceVertices[f] == meetSize) {
293                 found = PETSC_TRUE;
294                 break;
295               }
296             }
297           }
298           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
299         }
300         if (found) {
301           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
302           ++cellOffset;
303         }
304       }
305     }
306   }
307   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
308   if (numVertices) *numVertices = numCells;
309   if (offsets)   *offsets   = off;
310   if (adjacency) *adjacency = adj;
311   PetscFunctionReturn(0);
312 }
313 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerRegister"
/*@C
  PetscPartitionerRegister - Adds a new PetscPartitioner implementation

  Not Collective

  Input Parameters:
+ name        - The name of a new user-defined creation routine
- create_func - The creation routine itself

  Notes:
  PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners

  Sample usage:
.vb
    PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
.ve

  Then, your PetscPartitioner type can be chosen with the procedural interface via
.vb
    PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
    PetscPartitionerSetType(PetscPartitioner, "my_part");
.ve
   or at runtime via the option
.vb
    -petscpartitioner_type my_part
.ve

  Level: advanced

.keywords: PetscPartitioner, register
.seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()

@*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Append the constructor to the global list consulted by PetscPartitionerSetType() */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
357 
358 #undef __FUNCT__
359 #define __FUNCT__ "PetscPartitionerSetType"
360 /*@C
361   PetscPartitionerSetType - Builds a particular PetscPartitioner
362 
363   Collective on PetscPartitioner
364 
365   Input Parameters:
366 + part - The PetscPartitioner object
367 - name - The kind of partitioner
368 
369   Options Database Key:
370 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
371 
372   Level: intermediate
373 
374 .keywords: PetscPartitioner, set, type
375 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
376 @*/
377 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
378 {
379   PetscErrorCode (*r)(PetscPartitioner);
380   PetscBool      match;
381   PetscErrorCode ierr;
382 
383   PetscFunctionBegin;
384   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
385   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
386   if (match) PetscFunctionReturn(0);
387 
388   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
389   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
390   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
391 
392   if (part->ops->destroy) {
393     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
394     part->ops->destroy = NULL;
395   }
396   ierr = (*r)(part);CHKERRQ(ierr);
397   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
398   PetscFunctionReturn(0);
399 }
400 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerGetType"
/*@C
  PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.

  Not Collective

  Input Parameter:
. part - The PetscPartitioner

  Output Parameter:
. name - The PetscPartitioner type name

  Level: intermediate

.keywords: PetscPartitioner, get, type, name
.seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  /* Borrowed reference to the object's type string; the caller must not free it */
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
430 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerView"
/*@C
  PetscPartitionerView - Views a PetscPartitioner

  Collective on PetscPartitioner

  Input Parameters:
+ part - the PetscPartitioner object to view
- v    - the viewer, or NULL to use the standard output viewer on the object's communicator

  Level: developer

.seealso: PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Default to ASCII stdout on the partitioner's communicator */
  if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
  /* Delegate to the type-specific viewer, if any */
  if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
456 
457 #undef __FUNCT__
458 #define __FUNCT__ "PetscPartitionerViewFromOptions"
459 /*
460   PetscPartitionerViewFromOptions - Processes command line options to determine if/how a PetscPartitioner is to be viewed.
461 
462   Collective on PetscPartitioner
463 
464   Input Parameters:
465 + part   - the PetscPartitioner
466 . prefix - prefix to use for viewing, or NULL
467 - optionname - option to activate viewing
468 
469   Level: intermediate
470 
471 .keywords: PetscPartitioner, view, options, database
472 .seealso: VecViewFromOptions(), MatViewFromOptions()
473 */
474 PetscErrorCode PetscPartitionerViewFromOptions(PetscPartitioner part, const char prefix[], const char optionname[])
475 {
476   PetscViewer       viewer;
477   PetscViewerFormat format;
478   PetscBool         flg;
479   PetscErrorCode    ierr;
480 
481   PetscFunctionBegin;
482   if (prefix) {ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject) part), prefix,                       optionname, &viewer, &format, &flg);CHKERRQ(ierr);}
483   else        {ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject) part), ((PetscObject) part)->prefix, optionname, &viewer, &format, &flg);CHKERRQ(ierr);}
484   if (flg) {
485     ierr = PetscViewerPushFormat(viewer, format);CHKERRQ(ierr);
486     ierr = PetscPartitionerView(part, viewer);CHKERRQ(ierr);
487     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
488     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
489   }
490   PetscFunctionReturn(0);
491 }
492 #undef __FUNCT__
493 #define __FUNCT__ "PetscPartitionerSetTypeFromOptions_Internal"
494 PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
495 {
496   const char    *defaultType;
497   char           name[256];
498   PetscBool      flg;
499   PetscErrorCode ierr;
500 
501   PetscFunctionBegin;
502   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
503   if (!((PetscObject) part)->type_name) defaultType = PETSCPARTITIONERCHACO;
504   else                                  defaultType = ((PetscObject) part)->type_name;
505   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
506 
507   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
508   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);CHKERRQ(ierr);
509   if (flg) {
510     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
511   } else if (!((PetscObject) part)->type_name) {
512     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
513   }
514   ierr = PetscOptionsEnd();CHKERRQ(ierr);
515   PetscFunctionReturn(0);
516 }
517 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerSetFromOptions"
/*@
  PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to set options for

  Level: developer

.seealso: PetscPartitionerView()
@*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* First settle the type so the type-specific setfromoptions below exists */
  ierr = PetscPartitionerSetTypeFromOptions_Internal(part);CHKERRQ(ierr);

  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  if (part->ops->setfromoptions) {ierr = (*part->ops->setfromoptions)(part);CHKERRQ(ierr);}
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers((PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  /* Honor -petscpartitioner_view if given */
  ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
548 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerSetUp"
/*@C
  PetscPartitionerSetUp - Construct data structures for the PetscPartitioner

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to setup

  Level: developer

.seealso: PetscPartitionerView(), PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Delegate to the type-specific setup; a no-op if the type provides none */
  if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
572 
573 #undef __FUNCT__
574 #define __FUNCT__ "PetscPartitionerDestroy"
575 /*@
576   PetscPartitionerDestroy - Destroys a PetscPartitioner object
577 
578   Collective on PetscPartitioner
579 
580   Input Parameter:
581 . part - the PetscPartitioner object to destroy
582 
583   Level: developer
584 
585 .seealso: PetscPartitionerView()
586 @*/
587 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
588 {
589   PetscErrorCode ierr;
590 
591   PetscFunctionBegin;
592   if (!*part) PetscFunctionReturn(0);
593   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
594 
595   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
596   ((PetscObject) (*part))->refct = 0;
597 
598   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
599   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
600   PetscFunctionReturn(0);
601 }
602 
603 #undef __FUNCT__
604 #define __FUNCT__ "PetscPartitionerCreate"
605 /*@
606   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
607 
608   Collective on MPI_Comm
609 
610   Input Parameter:
611 . comm - The communicator for the PetscPartitioner object
612 
613   Output Parameter:
614 . part - The PetscPartitioner object
615 
616   Level: beginner
617 
618 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE
619 @*/
620 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
621 {
622   PetscPartitioner p;
623   PetscErrorCode   ierr;
624 
625   PetscFunctionBegin;
626   PetscValidPointer(part, 2);
627   *part = NULL;
628   ierr = PetscFVInitializePackage();CHKERRQ(ierr);
629 
630   ierr = PetscHeaderCreate(p, _p_PetscPartitioner, struct _PetscPartitionerOps, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
631   ierr = PetscMemzero(p->ops, sizeof(struct _PetscPartitionerOps));CHKERRQ(ierr);
632 
633   *part = p;
634   PetscFunctionReturn(0);
635 }
636 
637 #undef __FUNCT__
638 #define __FUNCT__ "PetscPartitionerPartition"
639 /*@
640   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
641 
642   Collective on DM
643 
644   Input Parameters:
645 + part    - The PetscPartitioner
646 - dm      - The mesh DM
647 
648   Output Parameters:
649 + partSection     - The PetscSection giving the division of points by partition
650 - partition       - The list of points by partition
651 
652   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
653 
654   Level: developer
655 
656 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
657 @*/
658 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
659 {
660   PetscMPIInt    size;
661   PetscErrorCode ierr;
662 
663   PetscFunctionBegin;
664   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
665   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
666   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
667   PetscValidPointer(partition, 5);
668   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
669   if (size == 1) {
670     PetscInt *points;
671     PetscInt  cStart, cEnd, c;
672 
673     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
674     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
675     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
676     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
677     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
678     for (c = cStart; c < cEnd; ++c) points[c] = c;
679     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
680     PetscFunctionReturn(0);
681   }
682   if (part->height == 0) {
683     PetscInt  numVertices;
684     PetscInt *start     = NULL;
685     PetscInt *adjacency = NULL;
686 
687     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency);CHKERRQ(ierr);
688     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
689     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
690     ierr = PetscFree(start);CHKERRQ(ierr);
691     ierr = PetscFree(adjacency);CHKERRQ(ierr);
692   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
693   PetscFunctionReturn(0);
694 }
695 
696 #undef __FUNCT__
697 #define __FUNCT__ "PetscPartitionerDestroy_Shell"
698 PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
699 {
700   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
701   PetscErrorCode          ierr;
702 
703   PetscFunctionBegin;
704   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
705   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
706   ierr = PetscFree(p);CHKERRQ(ierr);
707   PetscFunctionReturn(0);
708 }
709 
710 #undef __FUNCT__
711 #define __FUNCT__ "PetscPartitionerView_Shell_Ascii"
712 PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
713 {
714   PetscViewerFormat format;
715   PetscErrorCode    ierr;
716 
717   PetscFunctionBegin;
718   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
719   ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
720   PetscFunctionReturn(0);
721 }
722 
723 #undef __FUNCT__
724 #define __FUNCT__ "PetscPartitionerView_Shell"
725 PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
726 {
727   PetscBool      iascii;
728   PetscErrorCode ierr;
729 
730   PetscFunctionBegin;
731   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
732   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
733   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
734   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
735   PetscFunctionReturn(0);
736 }
737 
738 #undef __FUNCT__
739 #define __FUNCT__ "PetscPartitionerPartition_Shell"
740 PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
741 {
742   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
743   PetscInt                np;
744   PetscErrorCode          ierr;
745 
746   PetscFunctionBegin;
747   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
748   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
749   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
750   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
751   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
752   *partition = p->partition;
753   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
754   PetscFunctionReturn(0);
755 }
756 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerInitialize_Shell"
/* Install the shell implementation's function table on the partitioner object */
PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view      = PetscPartitionerView_Shell;
  part->ops->destroy   = PetscPartitionerDestroy_Shell;
  part->ops->partition = PetscPartitionerPartition_Shell;
  PetscFunctionReturn(0);
}
767 
768 /*MC
769   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
770 
771   Level: intermediate
772 
773 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
774 M*/
775 
776 #undef __FUNCT__
777 #define __FUNCT__ "PetscPartitionerCreate_Shell"
778 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
779 {
780   PetscPartitioner_Shell *p;
781   PetscErrorCode          ierr;
782 
783   PetscFunctionBegin;
784   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
785   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
786   part->data = p;
787 
788   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
789   PetscFunctionReturn(0);
790 }
791 
792 #undef __FUNCT__
793 #define __FUNCT__ "PetscPartitionerShellSetPartition"
794 /*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
796 
797   Collective on PART
798 
799   Input Parameters:
800 + part     - The PetscPartitioner
801 . numProcs - The number of partitions
802 . sizes    - array of size numProcs (or NULL) providing the number of points in each partition
803 - points   - array of size sum(sizes) (may be NULL iff sizes is NULL) providing the partition each point belongs to
804 
805   Level: developer
806 
807   Notes:
808 
809     It is safe to free the sizes and points arrays after use in this routine.
810 
811 .seealso DMPlexDistribute(), PetscPartitionerCreate()
812 @*/
813 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt numProcs, const PetscInt sizes[], const PetscInt points[])
814 {
815   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
816   PetscInt                proc, numPoints;
817   PetscErrorCode          ierr;
818 
819   PetscFunctionBegin;
820   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
821   if (sizes) {PetscValidPointer(sizes, 3);}
822   if (sizes) {PetscValidPointer(points, 4);}
823   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
824   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
825   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
826   ierr = PetscSectionSetChart(p->section, 0, numProcs);CHKERRQ(ierr);
827   if (sizes) {
828     for (proc = 0; proc < numProcs; ++proc) {
829       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
830     }
831   }
832   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
833   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
834   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
835   PetscFunctionReturn(0);
836 }
837 
838 #undef __FUNCT__
839 #define __FUNCT__ "PetscPartitionerDestroy_Simple"
840 PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
841 {
842   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
843   PetscErrorCode          ierr;
844 
845   PetscFunctionBegin;
846   ierr = PetscFree(p);CHKERRQ(ierr);
847   PetscFunctionReturn(0);
848 }
849 
850 #undef __FUNCT__
851 #define __FUNCT__ "PetscPartitionerView_Simple_Ascii"
852 PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
853 {
854   PetscViewerFormat format;
855   PetscErrorCode    ierr;
856 
857   PetscFunctionBegin;
858   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
859   ierr = PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");CHKERRQ(ierr);
860   PetscFunctionReturn(0);
861 }
862 
863 #undef __FUNCT__
864 #define __FUNCT__ "PetscPartitionerView_Simple"
865 PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
866 {
867   PetscBool      iascii;
868   PetscErrorCode ierr;
869 
870   PetscFunctionBegin;
871   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
872   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
873   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
874   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
875   PetscFunctionReturn(0);
876 }
877 
878 #undef __FUNCT__
879 #define __FUNCT__ "PetscPartitionerPartition_Simple"
880 PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
881 {
882   PetscInt       np;
883   PetscErrorCode ierr;
884 
885   PetscFunctionBegin;
886   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
887   for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
888   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
889   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
890   PetscFunctionReturn(0);
891 }
892 
893 #undef __FUNCT__
894 #define __FUNCT__ "PetscPartitionerInitialize_Simple"
895 PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
896 {
897   PetscFunctionBegin;
898   part->ops->view      = PetscPartitionerView_Simple;
899   part->ops->destroy   = PetscPartitionerDestroy_Simple;
900   part->ops->partition = PetscPartitionerPartition_Simple;
901   PetscFunctionReturn(0);
902 }
903 
904 /*MC
905   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
906 
907   Level: intermediate
908 
909 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
910 M*/
911 
912 #undef __FUNCT__
913 #define __FUNCT__ "PetscPartitionerCreate_Simple"
914 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
915 {
916   PetscPartitioner_Simple *p;
917   PetscErrorCode           ierr;
918 
919   PetscFunctionBegin;
920   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
921   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
922   part->data = p;
923 
924   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
925   PetscFunctionReturn(0);
926 }
927 
928 #undef __FUNCT__
929 #define __FUNCT__ "PetscPartitionerDestroy_Chaco"
930 PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
931 {
932   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
933   PetscErrorCode          ierr;
934 
935   PetscFunctionBegin;
936   ierr = PetscFree(p);CHKERRQ(ierr);
937   PetscFunctionReturn(0);
938 }
939 
940 #undef __FUNCT__
941 #define __FUNCT__ "PetscPartitionerView_Chaco_Ascii"
942 PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
943 {
944   PetscViewerFormat format;
945   PetscErrorCode    ierr;
946 
947   PetscFunctionBegin;
948   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
949   ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
950   PetscFunctionReturn(0);
951 }
952 
953 #undef __FUNCT__
954 #define __FUNCT__ "PetscPartitionerView_Chaco"
955 PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
956 {
957   PetscBool      iascii;
958   PetscErrorCode ierr;
959 
960   PetscFunctionBegin;
961   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
962   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
963   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
964   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
965   PetscFunctionReturn(0);
966 }
967 
968 #if defined(PETSC_HAVE_CHACO)
969 #if defined(PETSC_HAVE_UNISTD_H)
970 #include <unistd.h>
971 #endif
972 /* Chaco does not have an include file */
973 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
974                        float *ewgts, float *x, float *y, float *z, char *outassignname,
975                        char *outfilename, short *assignment, int architecture, int ndims_tot,
976                        int mesh_dims[3], double *goal, int global_method, int local_method,
977                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
978 
979 extern int FREE_GRAPH;
980 #endif
981 
982 #undef __FUNCT__
983 #define __FUNCT__ "PetscPartitionerPartition_Chaco"
/* Partition the local graph with the Chaco library.

   The CSR graph (start, adjacency) is handed to Chaco's undocumented
   interface() entry point, and the resulting per-vertex assignment is
   converted into a PetscSection of partition sizes plus an IS listing the
   points of each partition consecutively.  Chaco's stdout chatter is
   captured through a pipe and reported only on failure. */
PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3}; /* DEFAULT_METHOD is currently unused */
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
  short int     *assignment;              /* Output partition */
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  if (!numVertices) {
    /* Empty local graph: produce an empty section and an empty partition IS */
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Shift adjacency to 1-based numbering in place (presumably what Chaco
     expects -- TODO confirm against the Chaco manual); the shift is undone
     at the end of the routine.
     NOTE(review): the shift is NOT undone on the error returns below, so a
     failure leaves the caller's adjacency array mutated. */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  /* Target a 1-D mesh of nparts processors */
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    /* Restore stdout and collect whatever Chaco printed; it is only shown
       if the library reported failure.
       NOTE(review): output beyond 9999 bytes is silently truncated. */
    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#endif
  /* NOTE(review): when PETSC_HAVE_UNISTD_H is not defined, the return code
     of interface() is never checked */
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* List the vertices of each partition consecutively; the IS takes
     ownership of points */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift applied above */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1087 
1088 #undef __FUNCT__
1089 #define __FUNCT__ "PetscPartitionerInitialize_Chaco"
1090 PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1091 {
1092   PetscFunctionBegin;
1093   part->ops->view      = PetscPartitionerView_Chaco;
1094   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1095   part->ops->partition = PetscPartitionerPartition_Chaco;
1096   PetscFunctionReturn(0);
1097 }
1098 
1099 /*MC
1100   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1101 
1102   Level: intermediate
1103 
1104 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1105 M*/
1106 
1107 #undef __FUNCT__
1108 #define __FUNCT__ "PetscPartitionerCreate_Chaco"
1109 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1110 {
1111   PetscPartitioner_Chaco *p;
1112   PetscErrorCode          ierr;
1113 
1114   PetscFunctionBegin;
1115   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1116   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1117   part->data = p;
1118 
1119   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1120   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1121   PetscFunctionReturn(0);
1122 }
1123 
1124 #undef __FUNCT__
1125 #define __FUNCT__ "PetscPartitionerDestroy_ParMetis"
1126 PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1127 {
1128   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1129   PetscErrorCode             ierr;
1130 
1131   PetscFunctionBegin;
1132   ierr = PetscFree(p);CHKERRQ(ierr);
1133   PetscFunctionReturn(0);
1134 }
1135 
1136 #undef __FUNCT__
1137 #define __FUNCT__ "PetscPartitionerView_ParMetis_Ascii"
1138 PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1139 {
1140   PetscViewerFormat format;
1141   PetscErrorCode    ierr;
1142 
1143   PetscFunctionBegin;
1144   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1145   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
1146   PetscFunctionReturn(0);
1147 }
1148 
1149 #undef __FUNCT__
1150 #define __FUNCT__ "PetscPartitionerView_ParMetis"
1151 PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1152 {
1153   PetscBool      iascii;
1154   PetscErrorCode ierr;
1155 
1156   PetscFunctionBegin;
1157   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1158   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1159   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1160   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1161   PetscFunctionReturn(0);
1162 }
1163 
1164 #if defined(PETSC_HAVE_PARMETIS)
1165 #include <parmetis.h>
1166 #endif
1167 
1168 #undef __FUNCT__
1169 #define __FUNCT__ "PetscPartitionerPartition_ParMetis"
1170 PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1171 {
1172 #if defined(PETSC_HAVE_PARMETIS)
1173   MPI_Comm       comm;
1174   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1175   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1176   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1177   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1178   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1179   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1180   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1181   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1182   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1183   PetscReal     *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1184   PetscReal     *ubvec;                    /* The balance intolerance for vertex weights */
1185   PetscInt       options[5];               /* Options */
1186   /* Outputs */
1187   PetscInt       edgeCut;                  /* The number of edges cut by the partition */
1188   PetscInt      *assignment, *points;
1189   PetscMPIInt    rank, p, v, i;
1190   PetscErrorCode ierr;
1191 
1192   PetscFunctionBegin;
1193   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1194   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1195   options[0] = 0; /* Use all defaults */
1196   /* Calculate vertex distribution */
1197   ierr = PetscMalloc4(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment);CHKERRQ(ierr);
1198   vtxdist[0] = 0;
1199   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1200   for (p = 2; p <= nparts; ++p) {
1201     vtxdist[p] += vtxdist[p-1];
1202   }
1203   /* Calculate weights */
1204   for (p = 0; p < nparts; ++p) {
1205     tpwgts[p] = 1.0/nparts;
1206   }
1207   ubvec[0] = 1.05;
1208 
1209   if (nparts == 1) {
1210     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));
1211   } else {
1212     if (vtxdist[1] == vtxdist[nparts]) {
1213       if (!rank) {
1214         PetscStackPush("METIS_PartGraphKway");
1215         ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
1216         PetscStackPop;
1217         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1218       }
1219     } else {
1220       PetscStackPush("ParMETIS_V3_PartKway");
1221       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1222       PetscStackPop;
1223       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
1224     }
1225   }
1226   /* Convert to PetscSection+IS */
1227   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1228   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1229   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1230   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1231   for (p = 0, i = 0; p < nparts; ++p) {
1232     for (v = 0; v < nvtxs; ++v) {
1233       if (assignment[v] == p) points[i++] = v;
1234     }
1235   }
1236   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1237   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1238   ierr = PetscFree4(vtxdist,tpwgts,ubvec,assignment);CHKERRQ(ierr);
1239   PetscFunctionReturn(0);
1240 #else
1241   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1242 #endif
1243 }
1244 
1245 #undef __FUNCT__
1246 #define __FUNCT__ "PetscPartitionerInitialize_ParMetis"
1247 PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1248 {
1249   PetscFunctionBegin;
1250   part->ops->view      = PetscPartitionerView_ParMetis;
1251   part->ops->destroy   = PetscPartitionerDestroy_ParMetis;
1252   part->ops->partition = PetscPartitionerPartition_ParMetis;
1253   PetscFunctionReturn(0);
1254 }
1255 
1256 /*MC
1257   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1258 
1259   Level: intermediate
1260 
1261 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1262 M*/
1263 
1264 #undef __FUNCT__
1265 #define __FUNCT__ "PetscPartitionerCreate_ParMetis"
1266 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1267 {
1268   PetscPartitioner_ParMetis *p;
1269   PetscErrorCode          ierr;
1270 
1271   PetscFunctionBegin;
1272   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1273   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1274   part->data = p;
1275 
1276   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1277   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1278   PetscFunctionReturn(0);
1279 }
1280 
1281 #undef __FUNCT__
1282 #define __FUNCT__ "DMPlexGetPartitioner"
1283 /*@
1284   DMPlexGetPartitioner - Get the mesh partitioner
1285 
1286   Not collective
1287 
1288   Input Parameter:
1289 . dm - The DM
1290 
1291   Output Parameter:
1292 . part - The PetscPartitioner
1293 
1294   Level: developer
1295 
1296   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1297 
1298 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1299 @*/
1300 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1301 {
1302   DM_Plex *mesh = (DM_Plex *) dm->data;
1303 
1304   PetscFunctionBegin;
1305   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1306   PetscValidPointer(part, 2);
1307   *part = mesh->partitioner;
1308   PetscFunctionReturn(0);
1309 }
1310 
1311 #undef __FUNCT__
1312 #define __FUNCT__ "DMPlexSetPartitioner"
1313 /*@
1314   DMPlexSetPartitioner - Set the mesh partitioner
1315 
1316   logically collective on dm and part
1317 
1318   Input Parameters:
1319 + dm - The DM
1320 - part - The partitioner
1321 
1322   Level: developer
1323 
1324   Note: Any existing PetscPartitioner will be destroyed.
1325 
1326 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1327 @*/
1328 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1329 {
1330   DM_Plex       *mesh = (DM_Plex *) dm->data;
1331   PetscErrorCode ierr;
1332 
1333   PetscFunctionBegin;
1334   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1335   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1336   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1337   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1338   mesh->partitioner = part;
1339   PetscFunctionReturn(0);
1340 }
1341 
1342 #undef __FUNCT__
1343 #define __FUNCT__ "DMPlexPartitionLabelClosure_Tree"
1344 static PetscErrorCode DMPlexPartitionLabelClosure_Tree(DM dm, DMLabel label, PetscInt rank, PetscInt point)
1345 {
1346   PetscInt       parent, closureSize, *closure = NULL, i, pStart, pEnd;
1347   PetscErrorCode ierr;
1348 
1349   PetscFunctionBegin;
1350   ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
1351   if (parent == point) PetscFunctionReturn(0);
1352   ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr);
1353   ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
1354   for (i = 0; i < closureSize; i++) {
1355     PetscInt cpoint = closure[2*i];
1356 
1357     ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
1358     ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint);CHKERRQ(ierr);
1359   }
1360   ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
1361   PetscFunctionReturn(0);
1362 }
1363 
1364 #undef __FUNCT__
1365 #define __FUNCT__ "DMPlexPartitionLabelClosure"
1366 /*@
1367   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
1368 
1369   Input Parameters:
1370 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1372 
1373   Level: developer
1374 
1375 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1376 @*/
PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, c, closureSize;
  PetscInt       *closure = NULL;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* For every rank value present in the label, add the transitive closure
     of each of that rank's points (and their tree ancestors) to the same
     stratum */
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];

    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* closure is reused (and possibly reallocated) across iterations */
      ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      /* closure interleaves (point, orientation), hence the stride of 2 */
      for (c = 0; c < closureSize*2; c += 2) {
        ierr = DMLabelSetValue(label, closure[c], rank);CHKERRQ(ierr);
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,closure[c]);CHKERRQ(ierr);
      }
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  /* NOTE(review): returns the scratch closure array from the last Get using
     a dummy point of 0 -- assumes Restore does not use the point argument;
     confirm against DMPlexRestoreTransitiveClosure */
  if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);}
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1410 
1411 #undef __FUNCT__
1412 #define __FUNCT__ "DMPlexPartitionLabelAdjacency"
1413 /*@
1414   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
1415 
1416   Input Parameters:
1417 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1419 
1420   Level: developer
1421 
1422 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1423 @*/
1424 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
1425 {
1426   IS              rankIS,   pointIS;
1427   const PetscInt *ranks,   *points;
1428   PetscInt        numRanks, numPoints, r, p, a, adjSize;
1429   PetscInt       *adj = NULL;
1430   PetscErrorCode  ierr;
1431 
1432   PetscFunctionBegin;
1433   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1434   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1435   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1436   for (r = 0; r < numRanks; ++r) {
1437     const PetscInt rank = ranks[r];
1438 
1439     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1440     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1441     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1442     for (p = 0; p < numPoints; ++p) {
1443       adjSize = PETSC_DETERMINE;
1444       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
1445       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
1446     }
1447     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1448     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1449   }
1450   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1451   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1452   ierr = PetscFree(adj);CHKERRQ(ierr);
1453   PetscFunctionReturn(0);
1454 }
1455 
1456 #undef __FUNCT__
1457 #define __FUNCT__ "DMPlexPartitionLabelInvert"
1458 /*@
1459   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
1460 
1461   Input Parameters:
1462 + dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots
1468 
1469   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
1470   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
1471 
1472   Level: developer
1473 
1474 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1475 @*/
/* Invert a send-side label (local point -> destination rank) into a
   receive-side label (remote point -> owning rank) by communicating
   (point, rank) pairs over processSF via DMPlexDistributeData. */
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, numProcs;
  PetscInt           p, n, numNeighbors, size, l, nleaves;
  PetscSF            sfPoint;
  PetscSFNode       *rootPoints, *leafPoints;
  PetscSection       rootSection, leafSection;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, numProcs);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  /* Size rootSection by the number of points destined for each neighbor rank */
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(size, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  /* Fill rootPoints: for each labeled point, record its true owner */
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* If the point is a leaf in the point SF, its actual owner is the
         remote root; otherwise this process owns it */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Filter remote contributions (ovLeafPoints) into the overlapSF */
  ierr = PetscSectionGetStorageSize(leafSection, &size);CHKERRQ(ierr);
  for (p = 0; p < size; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1545 
1546 #undef __FUNCT__
1547 #define __FUNCT__ "DMPlexPartitionLabelCreateSF"
1548 /*@
1549   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
1550 
1551   Input Parameters:
1552 + dm    - The DM
. label - DMLabel assigning ranks to remote roots
1554 
1555   Output Parameter:
1556 - sf    - The star forest communication context encapsulating the defined mapping
1557 
1558   Note: The incoming label is a receiver mapping of remote points to their parent rank.
1559 
1560   Level: developer
1561 
.seealso: DMPlexDistribute(), DMPlexCreateOverlap()
1563 @*/
1564 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
1565 {
1566   PetscMPIInt     rank, numProcs;
1567   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
1568   PetscSFNode    *remotePoints;
1569   IS              remoteRootIS;
1570   const PetscInt *remoteRoots;
1571   PetscErrorCode ierr;
1572 
1573   PetscFunctionBegin;
1574   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1575   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
1576 
1577   for (numRemote = 0, n = 0; n < numProcs; ++n) {
1578     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1579     numRemote += numPoints;
1580   }
1581   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
1582   /* Put owned points first */
1583   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
1584   if (numPoints > 0) {
1585     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
1586     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1587     for (p = 0; p < numPoints; p++) {
1588       remotePoints[idx].index = remoteRoots[p];
1589       remotePoints[idx].rank = rank;
1590       idx++;
1591     }
1592     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1593     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1594   }
1595   /* Now add remote points */
1596   for (n = 0; n < numProcs; ++n) {
1597     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1598     if (numPoints <= 0 || n == rank) continue;
1599     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
1600     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1601     for (p = 0; p < numPoints; p++) {
1602       remotePoints[idx].index = remoteRoots[p];
1603       remotePoints[idx].rank = n;
1604       idx++;
1605     }
1606     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1607     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1608   }
1609   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
1610   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1611   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1612   PetscFunctionReturn(0);
1613 }
1614