xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 8cfe4c1f06d0c32e59a0d6d8dd36f10663364657)
1 #include <petsc-private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
/* Class id for PetscPartitioner objects; 0 until registered (presumably by the package
   initialization routine -- confirm against the package registration code) */
PetscClassId PETSCPARTITIONER_CLASSID = 0;

/* Registry mapping partitioner type names to their creation routines */
PetscFunctionList PetscPartitionerList              = NULL;
/* Guards one-time registration of the built-in partitioner implementations */
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
7 
8 PetscBool ChacoPartitionercite = PETSC_FALSE;
9 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
10                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
11                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
12                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
13                                "  isbn      = {0-89791-816-9},\n"
14                                "  pages     = {28},\n"
15                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
16                                "  publisher = {ACM Press},\n"
17                                "  address   = {New York},\n"
18                                "  year      = {1995}\n}\n";
19 
/* Citation for the ParMetis partitioner; the flag records whether it has been emitted yet
   (presumably flipped by PetscCitationsRegister() at first use -- confirm at call sites) */
PetscBool ParMetisPartitionercite = PETSC_FALSE;
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";
28 
29 #undef __FUNCT__
30 #define __FUNCT__ "DMPlexCreatePartitionerGraph"
31 /*@C
32   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
33 
34   Input Parameters:
35 + dm      - The mesh DM dm
36 - height  - Height of the strata from which to construct the graph
37 
38   Output Parameter:
39 + numVertices - Number of vertices in the graph
40 - offsets     - Point offsets in the graph
41 - adjacency   - Point connectivity in the graph
42 
43   The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
44   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
45   representation on the mesh.
46 
47   Level: developer
48 
49 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
50 @*/
51 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
52 {
53   PetscInt       p, pStart, pEnd, a, adjSize, size, nroots;
54   PetscInt      *adj = NULL, *vOffsets = NULL;
55   IS             cellNumbering;
56   const PetscInt *cellNum;
57   PetscBool      useCone, useClosure;
58   PetscSection   section;
59   PetscSegBuffer adjBuffer;
60   PetscSF        sfPoint;
61   PetscMPIInt    rank;
62   PetscErrorCode ierr;
63 
64   PetscFunctionBegin;
65   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
66   ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
67   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
68   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
69   /* Build adjacency graph via a section/segbuffer */
70   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
71   ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
72   ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
73   /* Always use FVM adjacency to create partitioner graph */
74   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
75   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
76   ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
77   ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
78   if (nroots > 0) {
79     ierr = DMPlexGetCellNumbering(dm, &cellNumbering);CHKERRQ(ierr);
80     ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
81   }
82   for (*numVertices = 0, p = pStart; p < pEnd; p++) {
83     /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
84     if (nroots > 0) {if (cellNum[p] < 0) continue;}
85     adjSize = PETSC_DETERMINE;
86     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
87     for (a = 0; a < adjSize; ++a) {
88       const PetscInt point = adj[a];
89       if (point != p && pStart <= point && point < pEnd) {
90         PetscInt *PETSC_RESTRICT pBuf;
91         ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
92         ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
93         *pBuf = point;
94       }
95     }
96     (*numVertices)++;
97   }
98   ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
99   ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
100   /* Derive CSR graph from section/segbuffer */
101   ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
102   ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
103   ierr = PetscMalloc2(*numVertices+1, &vOffsets, size, adjacency);CHKERRQ(ierr);
104   for (p = 0; p < *numVertices; p++) {ierr = PetscSectionGetOffset(section, p, &(vOffsets[p]));CHKERRQ(ierr);}
105   vOffsets[*numVertices] = size;
106   if (offsets) *offsets = vOffsets;
107   if (adjacency) {
108     ierr = PetscSegBufferExtractTo(adjBuffer, *adjacency);CHKERRQ(ierr);
109     if (nroots > 0) {
110       ISLocalToGlobalMapping ltogCells;
111       PetscInt n, size, *cells_arr;
112       /* In parallel, apply a global cell numbering to the graph */
113       ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
114       ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
115       ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
116       ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
117       /* Convert to positive global cell numbers */
118       for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
119       ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
120       ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], *adjacency, *adjacency);CHKERRQ(ierr);
121       ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
122     }
123   }
124   /* Clean up */
125   ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
126   ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
127   ierr = PetscFree(adj);CHKERRQ(ierr);
128   PetscFunctionReturn(0);
129 }
130 
131 #undef __FUNCT__
132 #define __FUNCT__ "DMPlexCreateNeighborCSR"
133 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
134 {
135   const PetscInt maxFaceCases = 30;
136   PetscInt       numFaceCases = 0;
137   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
138   PetscInt      *off, *adj;
139   PetscInt      *neighborCells = NULL;
140   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
141   PetscErrorCode ierr;
142 
143   PetscFunctionBegin;
144   /* For parallel partitioning, I think you have to communicate supports */
145   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
146   cellDim = dim - cellHeight;
147   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
148   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
149   if (cEnd - cStart == 0) {
150     if (numVertices) *numVertices = 0;
151     if (offsets)   *offsets   = NULL;
152     if (adjacency) *adjacency = NULL;
153     PetscFunctionReturn(0);
154   }
155   numCells  = cEnd - cStart;
156   faceDepth = depth - cellHeight;
157   if (dim == depth) {
158     PetscInt f, fStart, fEnd;
159 
160     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
161     /* Count neighboring cells */
162     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
163     for (f = fStart; f < fEnd; ++f) {
164       const PetscInt *support;
165       PetscInt        supportSize;
166       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
167       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
168       if (supportSize == 2) {
169         PetscInt numChildren;
170 
171         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
172         if (!numChildren) {
173           ++off[support[0]-cStart+1];
174           ++off[support[1]-cStart+1];
175         }
176       }
177     }
178     /* Prefix sum */
179     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
180     if (adjacency) {
181       PetscInt *tmp;
182 
183       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
184       ierr = PetscMalloc1((numCells+1), &tmp);CHKERRQ(ierr);
185       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
186       /* Get neighboring cells */
187       for (f = fStart; f < fEnd; ++f) {
188         const PetscInt *support;
189         PetscInt        supportSize;
190         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
191         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
192         if (supportSize == 2) {
193           PetscInt numChildren;
194 
195           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
196           if (!numChildren) {
197             adj[tmp[support[0]-cStart]++] = support[1];
198             adj[tmp[support[1]-cStart]++] = support[0];
199           }
200         }
201       }
202 #if defined(PETSC_USE_DEBUG)
203       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
204 #endif
205       ierr = PetscFree(tmp);CHKERRQ(ierr);
206     }
207     if (numVertices) *numVertices = numCells;
208     if (offsets)   *offsets   = off;
209     if (adjacency) *adjacency = adj;
210     PetscFunctionReturn(0);
211   }
212   /* Setup face recognition */
213   if (faceDepth == 1) {
214     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
215 
216     for (c = cStart; c < cEnd; ++c) {
217       PetscInt corners;
218 
219       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
220       if (!cornersSeen[corners]) {
221         PetscInt nFV;
222 
223         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
224         cornersSeen[corners] = 1;
225 
226         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
227 
228         numFaceVertices[numFaceCases++] = nFV;
229       }
230     }
231   }
232   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
233   /* Count neighboring cells */
234   for (cell = cStart; cell < cEnd; ++cell) {
235     PetscInt numNeighbors = PETSC_DETERMINE, n;
236 
237     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
238     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
239     for (n = 0; n < numNeighbors; ++n) {
240       PetscInt        cellPair[2];
241       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
242       PetscInt        meetSize = 0;
243       const PetscInt *meet    = NULL;
244 
245       cellPair[0] = cell; cellPair[1] = neighborCells[n];
246       if (cellPair[0] == cellPair[1]) continue;
247       if (!found) {
248         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
249         if (meetSize) {
250           PetscInt f;
251 
252           for (f = 0; f < numFaceCases; ++f) {
253             if (numFaceVertices[f] == meetSize) {
254               found = PETSC_TRUE;
255               break;
256             }
257           }
258         }
259         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
260       }
261       if (found) ++off[cell-cStart+1];
262     }
263   }
264   /* Prefix sum */
265   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
266 
267   if (adjacency) {
268     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
269     /* Get neighboring cells */
270     for (cell = cStart; cell < cEnd; ++cell) {
271       PetscInt numNeighbors = PETSC_DETERMINE, n;
272       PetscInt cellOffset   = 0;
273 
274       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
275       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
276       for (n = 0; n < numNeighbors; ++n) {
277         PetscInt        cellPair[2];
278         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
279         PetscInt        meetSize = 0;
280         const PetscInt *meet    = NULL;
281 
282         cellPair[0] = cell; cellPair[1] = neighborCells[n];
283         if (cellPair[0] == cellPair[1]) continue;
284         if (!found) {
285           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
286           if (meetSize) {
287             PetscInt f;
288 
289             for (f = 0; f < numFaceCases; ++f) {
290               if (numFaceVertices[f] == meetSize) {
291                 found = PETSC_TRUE;
292                 break;
293               }
294             }
295           }
296           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
297         }
298         if (found) {
299           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
300           ++cellOffset;
301         }
302       }
303     }
304   }
305   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
306   if (numVertices) *numVertices = numCells;
307   if (offsets)   *offsets   = off;
308   if (adjacency) *adjacency = adj;
309   PetscFunctionReturn(0);
310 }
311 
#undef __FUNCT__
#define __FUNCT__ "DMPlexEnlargePartition"
/* Expand the partition by BFS on the adjacency graph */
/* Grows each partition of origPartSection/origPartition by one adjacency level (using cone
   adjacency), producing partSection/*partition. Duplicates within a partition are removed
   via a hash set; a partition may still overlap other partitions.
   NOTE(review): the 'start' and 'adjacency' parameters are never referenced in this body;
   BFS uses DMPlexGetAdjacency() instead -- confirm whether they are vestigial. */
PetscErrorCode DMPlexEnlargePartition(DM dm, const PetscInt start[], const PetscInt adjacency[], PetscSection origPartSection, IS origPartition, PetscSection partSection, IS *partition)
{
  PetscHashI      h;
  const PetscInt *points;
  PetscInt      **tmpPoints, *newPoints, totPoints = 0;
  PetscInt        pStart, pEnd, part, q;
  PetscBool       useCone;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscHashICreate(h);
  ierr = PetscSectionGetChart(origPartSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, pStart, pEnd);CHKERRQ(ierr);
  ierr = ISGetIndices(origPartition, &points);CHKERRQ(ierr);
  /* tmpPoints[part] holds the enlarged point set of each partition until concatenation below */
  ierr = PetscMalloc1((pEnd - pStart), &tmpPoints);CHKERRQ(ierr);
  /* Force cone adjacency for the BFS; the user's setting is restored afterwards */
  ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
  for (part = pStart; part < pEnd; ++part) {
    PetscInt *adj = NULL;
    PetscInt  numPoints, nP, numNewPoints, off, p, n = 0;

    /* The hash set is reused across partitions; clear it for this one */
    PetscHashIClear(h);
    ierr = PetscSectionGetDof(origPartSection, part, &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(origPartSection, part, &off);CHKERRQ(ierr);
    /* Add all existing points to h */
    for (p = 0; p < numPoints; ++p) {
      const PetscInt point = points[off+p];
      PetscHashIAdd(h, point, 1);
    }
    /* Sanity check: the input partition must not contain duplicates */
    PetscHashISize(h, nP);
    if (nP != numPoints) SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONG, "Invalid partition has %d points, but only %d were unique", numPoints, nP);
    /* Add all points in next BFS level */
    for (p = 0; p < numPoints; ++p) {
      const PetscInt point   = points[off+p];
      PetscInt       adjSize = PETSC_DETERMINE, a;

      ierr = DMPlexGetAdjacency(dm, point, &adjSize, &adj);CHKERRQ(ierr);
      for (a = 0; a < adjSize; ++a) PetscHashIAdd(h, adj[a], 1);
    }
    PetscHashISize(h, numNewPoints);
    ierr = PetscSectionSetDof(partSection, part, numNewPoints);CHKERRQ(ierr);
    ierr = PetscMalloc1(numNewPoints, &tmpPoints[part]);CHKERRQ(ierr);
    ierr = PetscHashIGetKeys(h, &n, tmpPoints[part]);CHKERRQ(ierr);
    /* adj was (re)allocated by DMPlexGetAdjacency for this partition; release it */
    ierr = PetscFree(adj);CHKERRQ(ierr);
    totPoints += numNewPoints;
  }
  ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
  ierr = ISRestoreIndices(origPartition, &points);CHKERRQ(ierr);
  PetscHashIDestroy(h);
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* Concatenate the per-partition point lists into a single array owned by the output IS */
  ierr = PetscMalloc1(totPoints, &newPoints);CHKERRQ(ierr);
  for (part = pStart, q = 0; part < pEnd; ++part) {
    PetscInt numPoints, p;

    ierr = PetscSectionGetDof(partSection, part, &numPoints);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p, ++q) newPoints[q] = tmpPoints[part][p];
    ierr = PetscFree(tmpPoints[part]);CHKERRQ(ierr);
  }
  ierr = PetscFree(tmpPoints);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), totPoints, newPoints, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
377 
378 #undef __FUNCT__
379 #define __FUNCT__ "PetscPartitionerRegister"
380 /*@C
381   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
382 
383   Not Collective
384 
385   Input Parameters:
386 + name        - The name of a new user-defined creation routine
387 - create_func - The creation routine itself
388 
389   Notes:
390   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
391 
392   Sample usage:
393 .vb
394     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
395 .ve
396 
397   Then, your PetscPartitioner type can be chosen with the procedural interface via
398 .vb
399     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
400     PetscPartitionerSetType(PetscPartitioner, "my_part");
401 .ve
402    or at runtime via the option
403 .vb
404     -petscpartitioner_type my_part
405 .ve
406 
407   Level: advanced
408 
409 .keywords: PetscPartitioner, register
410 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
411 
412 @*/
413 PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
414 {
415   PetscErrorCode ierr;
416 
417   PetscFunctionBegin;
418   ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
419   PetscFunctionReturn(0);
420 }
421 
422 #undef __FUNCT__
423 #define __FUNCT__ "PetscPartitionerSetType"
424 /*@C
425   PetscPartitionerSetType - Builds a particular PetscPartitioner
426 
427   Collective on PetscPartitioner
428 
429   Input Parameters:
430 + part - The PetscPartitioner object
431 - name - The kind of partitioner
432 
433   Options Database Key:
434 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
435 
436   Level: intermediate
437 
438 .keywords: PetscPartitioner, set, type
439 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
440 @*/
441 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
442 {
443   PetscErrorCode (*r)(PetscPartitioner);
444   PetscBool      match;
445   PetscErrorCode ierr;
446 
447   PetscFunctionBegin;
448   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
449   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
450   if (match) PetscFunctionReturn(0);
451 
452   if (!PetscPartitionerRegisterAllCalled) {ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);}
453   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
454   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
455 
456   if (part->ops->destroy) {
457     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
458     part->ops->destroy = NULL;
459   }
460   ierr = (*r)(part);CHKERRQ(ierr);
461   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
462   PetscFunctionReturn(0);
463 }
464 
465 #undef __FUNCT__
466 #define __FUNCT__ "PetscPartitionerGetType"
467 /*@C
468   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
469 
470   Not Collective
471 
472   Input Parameter:
473 . part - The PetscPartitioner
474 
475   Output Parameter:
476 . name - The PetscPartitioner type name
477 
478   Level: intermediate
479 
480 .keywords: PetscPartitioner, get, type, name
481 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
482 @*/
483 PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
484 {
485   PetscErrorCode ierr;
486 
487   PetscFunctionBegin;
488   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
489   PetscValidPointer(name, 2);
490   if (!PetscPartitionerRegisterAllCalled) {ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);}
491   *name = ((PetscObject) part)->type_name;
492   PetscFunctionReturn(0);
493 }
494 
495 #undef __FUNCT__
496 #define __FUNCT__ "PetscPartitionerView"
497 /*@C
498   PetscPartitionerView - Views a PetscPartitioner
499 
500   Collective on PetscPartitioner
501 
502   Input Parameter:
503 + part - the PetscPartitioner object to view
504 - v    - the viewer
505 
506   Level: developer
507 
508 .seealso: PetscPartitionerDestroy()
509 @*/
510 PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
511 {
512   PetscErrorCode ierr;
513 
514   PetscFunctionBegin;
515   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
516   if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
517   if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
518   PetscFunctionReturn(0);
519 }
520 
521 #undef __FUNCT__
522 #define __FUNCT__ "PetscPartitionerViewFromOptions"
523 /*
524   PetscPartitionerViewFromOptions - Processes command line options to determine if/how a PetscPartitioner is to be viewed.
525 
526   Collective on PetscPartitioner
527 
528   Input Parameters:
529 + part   - the PetscPartitioner
530 . prefix - prefix to use for viewing, or NULL
531 - optionname - option to activate viewing
532 
533   Level: intermediate
534 
535 .keywords: PetscPartitioner, view, options, database
536 .seealso: VecViewFromOptions(), MatViewFromOptions()
537 */
538 PetscErrorCode PetscPartitionerViewFromOptions(PetscPartitioner part, const char prefix[], const char optionname[])
539 {
540   PetscViewer       viewer;
541   PetscViewerFormat format;
542   PetscBool         flg;
543   PetscErrorCode    ierr;
544 
545   PetscFunctionBegin;
546   if (prefix) {ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject) part), prefix,                       optionname, &viewer, &format, &flg);CHKERRQ(ierr);}
547   else        {ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject) part), ((PetscObject) part)->prefix, optionname, &viewer, &format, &flg);CHKERRQ(ierr);}
548   if (flg) {
549     ierr = PetscViewerPushFormat(viewer, format);CHKERRQ(ierr);
550     ierr = PetscPartitionerView(part, viewer);CHKERRQ(ierr);
551     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
552     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
553   }
554   PetscFunctionReturn(0);
555 }
556 #undef __FUNCT__
557 #define __FUNCT__ "PetscPartitionerSetTypeFromOptions_Internal"
558 PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
559 {
560   const char    *defaultType;
561   char           name[256];
562   PetscBool      flg;
563   PetscErrorCode ierr;
564 
565   PetscFunctionBegin;
566   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
567   if (!((PetscObject) part)->type_name) defaultType = PETSCPARTITIONERCHACO;
568   else                                  defaultType = ((PetscObject) part)->type_name;
569   if (!PetscPartitionerRegisterAllCalled) {ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);}
570 
571   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
572   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);CHKERRQ(ierr);
573   if (flg) {
574     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
575   } else if (!((PetscObject) part)->type_name) {
576     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
577   }
578   ierr = PetscOptionsEnd();CHKERRQ(ierr);
579   PetscFunctionReturn(0);
580 }
581 
582 #undef __FUNCT__
583 #define __FUNCT__ "PetscPartitionerSetFromOptions"
584 /*@
585   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
586 
587   Collective on PetscPartitioner
588 
589   Input Parameter:
590 . part - the PetscPartitioner object to set options for
591 
592   Level: developer
593 
594 .seealso: PetscPartitionerView()
595 @*/
596 PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
597 {
598   PetscErrorCode ierr;
599 
600   PetscFunctionBegin;
601   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
602   ierr = PetscPartitionerSetTypeFromOptions_Internal(part);CHKERRQ(ierr);
603 
604   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
605   if (part->ops->setfromoptions) {ierr = (*part->ops->setfromoptions)(part);CHKERRQ(ierr);}
606   /* process any options handlers added with PetscObjectAddOptionsHandler() */
607   ierr = PetscObjectProcessOptionsHandlers((PetscObject) part);CHKERRQ(ierr);
608   ierr = PetscOptionsEnd();CHKERRQ(ierr);
609   ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
610   PetscFunctionReturn(0);
611 }
612 
613 #undef __FUNCT__
614 #define __FUNCT__ "PetscPartitionerSetUp"
615 /*@C
616   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
617 
618   Collective on PetscPartitioner
619 
620   Input Parameter:
621 . part - the PetscPartitioner object to setup
622 
623   Level: developer
624 
625 .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
626 @*/
627 PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
628 {
629   PetscErrorCode ierr;
630 
631   PetscFunctionBegin;
632   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
633   if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
634   PetscFunctionReturn(0);
635 }
636 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerDestroy"
/*@
  PetscPartitionerDestroy - Destroys a PetscPartitioner object

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to destroy; set to NULL on return

  Level: developer

.seealso: PetscPartitionerView()
@*/
PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*part) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);

  /* Only the last reference actually destroys the object; earlier calls just drop a reference */
  if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
  ((PetscObject) (*part))->refct = 0;

  /* Type-specific teardown first, then the common header (which also zeroes *part) */
  if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
  ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
666 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerCreate"
/*@
  PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the PetscPartitioner object

  Output Parameter:
. part - The PetscPartitioner object

  Level: beginner

.seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL
@*/
PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
{
  PetscPartitioner p;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidPointer(part, 2);
  *part = NULL;
  /* NOTE(review): registers the FV package, presumably because PETSCPARTITIONER_CLASSID
     is created there -- confirm against PetscFVInitializePackage() */
  ierr = PetscFVInitializePackage();CHKERRQ(ierr);

  /* Allocate the object header plus its ops table; the type (and thus the ops) is set later */
  ierr = PetscHeaderCreate(p, _p_PetscPartitioner, struct _PetscPartitionerOps, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
  ierr = PetscMemzero(p->ops, sizeof(struct _PetscPartitionerOps));CHKERRQ(ierr);

  *part = p;
  PetscFunctionReturn(0);
}
700 
701 #undef __FUNCT__
702 #define __FUNCT__ "PetscPartitionerPartition"
703 /*@
704   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
705 
706   Collective on DM
707 
708   Input Parameters:
709 + part    - The PetscPartitioner
710 - dm      - The mesh DM
711 
712   Output Parameters:
713 + partSection     - The PetscSection giving the division of points by partition
714 - partition       - The list of points by partition
715 
716   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
717 
718   Level: developer
719 
720 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
721 @*/
722 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
723 {
724   PetscMPIInt    size;
725   PetscErrorCode ierr;
726 
727   PetscFunctionBegin;
728   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
729   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
730   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
731   PetscValidPointer(partition, 5);
732   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
733   if (size == 1) {
734     PetscInt *points;
735     PetscInt  cStart, cEnd, c;
736 
737     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
738     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
739     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
740     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
741     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
742     for (c = cStart; c < cEnd; ++c) points[c] = c;
743     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
744     PetscFunctionReturn(0);
745   }
746   if (part->height == 0) {
747     PetscInt  numVertices;
748     PetscInt *start     = NULL;
749     PetscInt *adjacency = NULL;
750 
751     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency);CHKERRQ(ierr);
752     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
753     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
754     ierr = PetscFree(start);CHKERRQ(ierr);
755     ierr = PetscFree(adjacency);CHKERRQ(ierr);
756   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
757   PetscFunctionReturn(0);
758 }
759 
760 #undef __FUNCT__
761 #define __FUNCT__ "PetscPartitionerDestroy_Shell"
762 PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
763 {
764   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
765   PetscErrorCode          ierr;
766 
767   PetscFunctionBegin;
768   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
769   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
770   ierr = PetscFree(p);CHKERRQ(ierr);
771   PetscFunctionReturn(0);
772 }
773 
774 #undef __FUNCT__
775 #define __FUNCT__ "PetscPartitionerView_Shell_Ascii"
776 PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
777 {
778   PetscViewerFormat format;
779   PetscErrorCode    ierr;
780 
781   PetscFunctionBegin;
782   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
783   ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
784   PetscFunctionReturn(0);
785 }
786 
787 #undef __FUNCT__
788 #define __FUNCT__ "PetscPartitionerView_Shell"
789 PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
790 {
791   PetscBool      iascii;
792   PetscErrorCode ierr;
793 
794   PetscFunctionBegin;
795   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
796   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
797   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
798   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
799   PetscFunctionReturn(0);
800 }
801 
802 #undef __FUNCT__
803 #define __FUNCT__ "PetscPartitionerPartition_Shell"
804 PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
805 {
806   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
807   PetscInt                np;
808   PetscErrorCode          ierr;
809 
810   PetscFunctionBegin;
811   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
812   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
813   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
814   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
815   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
816   *partition = p->partition;
817   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
818   PetscFunctionReturn(0);
819 }
820 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerInitialize_Shell"
/* Fill the function table of a shell partitioner with its type-specific implementations */
PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view      = PetscPartitionerView_Shell;
  part->ops->destroy   = PetscPartitionerDestroy_Shell;
  part->ops->partition = PetscPartitionerPartition_Shell;
  PetscFunctionReturn(0);
}
831 
832 /*MC
833   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
834 
835   Level: intermediate
836 
837 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
838 M*/
839 
840 #undef __FUNCT__
841 #define __FUNCT__ "PetscPartitionerCreate_Shell"
842 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
843 {
844   PetscPartitioner_Shell *p;
845   PetscErrorCode          ierr;
846 
847   PetscFunctionBegin;
848   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
849   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
850   part->data = p;
851 
852   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
853   PetscFunctionReturn(0);
854 }
855 
856 #undef __FUNCT__
857 #define __FUNCT__ "PetscPartitionerShellSetPartition"
858 /*@C
859   PetscPartitionerShellSetPartition - Set an artifical partition for a mesh
860 
861   Collective on DM
862 
863   Input Parameters:
864 + part    - The PetscPartitioner
865 . dm      - The mesh DM
866 - enlarge - Expand each partition with neighbors
867 
868   Level: developer
869 
870 .seealso DMPlexDistribute(), PetscPartitionerCreate()
871 @*/
872 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt numProcs, const PetscInt sizes[], const PetscInt points[])
873 {
874   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
875   PetscInt                proc, numPoints;
876   PetscErrorCode          ierr;
877 
878   PetscFunctionBegin;
879   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
880   if (sizes) {PetscValidPointer(sizes, 3);}
881   if (sizes) {PetscValidPointer(points, 4);}
882   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
883   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
884   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
885   ierr = PetscSectionSetChart(p->section, 0, numProcs);CHKERRQ(ierr);
886   if (sizes) {
887     for (proc = 0; proc < numProcs; ++proc) {
888       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
889     }
890   }
891   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
892   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
893   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
894   PetscFunctionReturn(0);
895 }
896 
897 #undef __FUNCT__
898 #define __FUNCT__ "PetscPartitionerDestroy_Chaco"
899 PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
900 {
901   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
902   PetscErrorCode          ierr;
903 
904   PetscFunctionBegin;
905   ierr = PetscFree(p);CHKERRQ(ierr);
906   PetscFunctionReturn(0);
907 }
908 
909 #undef __FUNCT__
910 #define __FUNCT__ "PetscPartitionerView_Chaco_Ascii"
911 PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
912 {
913   PetscViewerFormat format;
914   PetscErrorCode    ierr;
915 
916   PetscFunctionBegin;
917   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
918   ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
919   PetscFunctionReturn(0);
920 }
921 
922 #undef __FUNCT__
923 #define __FUNCT__ "PetscPartitionerView_Chaco"
924 PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
925 {
926   PetscBool      iascii;
927   PetscErrorCode ierr;
928 
929   PetscFunctionBegin;
930   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
931   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
932   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
933   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
934   PetscFunctionReturn(0);
935 }
936 
937 #if defined(PETSC_HAVE_CHACO)
938 #if defined(PETSC_HAVE_UNISTD_H)
939 #include <unistd.h>
940 #endif
941 /* Chaco does not have an include file */
942 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
943                        float *ewgts, float *x, float *y, float *z, char *outassignname,
944                        char *outfilename, short *assignment, int architecture, int ndims_tot,
945                        int mesh_dims[3], double *goal, int global_method, int local_method,
946                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
947 
948 extern int FREE_GRAPH;
949 #endif
950 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerPartition_Chaco"
/*
  Partition the serial CSR graph (start, adjacency) into nparts pieces with the Chaco
  library, returning the result as a PetscSection (points per partition) plus an IS
  (the points themselves, grouped by partition).

  Note that Chaco writes diagnostics to stdout, so stdout is temporarily redirected
  into a pipe and the captured text is reported only if the library signals an error.
*/
PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
  short int     *assignment;              /* Output partition */
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  if (!numVertices) {
    /* Empty local graph: produce an empty chart/IS and return early */
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco expects 1-based vertex numbers; shift the adjacency in place and undo it below */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  /* 1-D mesh of nparts processors */
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    /* Drain whatever Chaco printed, then restore the original stdout */
    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  /* Group the vertex numbers by their assigned partition */
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift applied to the caller's adjacency array above */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1056 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerInitialize_Chaco"
/* Fill the function table of a Chaco partitioner with its type-specific implementations */
PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view      = PetscPartitionerView_Chaco;
  part->ops->destroy   = PetscPartitionerDestroy_Chaco;
  part->ops->partition = PetscPartitionerPartition_Chaco;
  PetscFunctionReturn(0);
}
1067 
1068 /*MC
1069   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1070 
1071   Level: intermediate
1072 
1073 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1074 M*/
1075 
1076 #undef __FUNCT__
1077 #define __FUNCT__ "PetscPartitionerCreate_Chaco"
1078 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1079 {
1080   PetscPartitioner_Chaco *p;
1081   PetscErrorCode          ierr;
1082 
1083   PetscFunctionBegin;
1084   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1085   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1086   part->data = p;
1087 
1088   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1089   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1090   PetscFunctionReturn(0);
1091 }
1092 
1093 #undef __FUNCT__
1094 #define __FUNCT__ "PetscPartitionerDestroy_ParMetis"
1095 PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1096 {
1097   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1098   PetscErrorCode             ierr;
1099 
1100   PetscFunctionBegin;
1101   ierr = PetscFree(p);CHKERRQ(ierr);
1102   PetscFunctionReturn(0);
1103 }
1104 
1105 #undef __FUNCT__
1106 #define __FUNCT__ "PetscPartitionerView_ParMetis_Ascii"
1107 PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1108 {
1109   PetscViewerFormat format;
1110   PetscErrorCode    ierr;
1111 
1112   PetscFunctionBegin;
1113   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1114   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
1115   PetscFunctionReturn(0);
1116 }
1117 
1118 #undef __FUNCT__
1119 #define __FUNCT__ "PetscPartitionerView_ParMetis"
1120 PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1121 {
1122   PetscBool      iascii;
1123   PetscErrorCode ierr;
1124 
1125   PetscFunctionBegin;
1126   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1127   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1128   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1129   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1130   PetscFunctionReturn(0);
1131 }
1132 
1133 #if defined(PETSC_HAVE_PARMETIS)
1134 #include <parmetis.h>
1135 #endif
1136 
1137 #undef __FUNCT__
1138 #define __FUNCT__ "PetscPartitionerPartition_ParMetis"
1139 PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1140 {
1141 #if defined(PETSC_HAVE_PARMETIS)
1142   MPI_Comm       comm;
1143   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1144   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1145   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1146   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1147   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1148   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1149   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1150   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1151   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1152   PetscReal     *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1153   PetscReal     *ubvec;                    /* The balance intolerance for vertex weights */
1154   PetscInt       options[5];               /* Options */
1155   /* Outputs */
1156   PetscInt       edgeCut;                  /* The number of edges cut by the partition */
1157   PetscInt      *assignment, *points;
1158   PetscMPIInt    rank, p, v, i;
1159   PetscErrorCode ierr;
1160 
1161   PetscFunctionBegin;
1162   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1163   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1164   options[0] = 0; /* Use all defaults */
1165   /* Calculate vertex distribution */
1166   ierr = PetscMalloc4(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment);CHKERRQ(ierr);
1167   vtxdist[0] = 0;
1168   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1169   for (p = 2; p <= nparts; ++p) {
1170     vtxdist[p] += vtxdist[p-1];
1171   }
1172   /* Calculate weights */
1173   for (p = 0; p < nparts; ++p) {
1174     tpwgts[p] = 1.0/nparts;
1175   }
1176   ubvec[0] = 1.05;
1177 
1178   if (nparts == 1) {
1179     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));
1180   } else {
1181     if (vtxdist[1] == vtxdist[nparts]) {
1182       if (!rank) {
1183         PetscStackPush("METIS_PartGraphKway");
1184         ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
1185         PetscStackPop;
1186         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1187       }
1188     } else {
1189       PetscStackPush("ParMETIS_V3_PartKway");
1190       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1191       PetscStackPop;
1192       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
1193     }
1194   }
1195   /* Convert to PetscSection+IS */
1196   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1197   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1198   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1199   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1200   for (p = 0, i = 0; p < nparts; ++p) {
1201     for (v = 0; v < nvtxs; ++v) {
1202       if (assignment[v] == p) points[i++] = v;
1203     }
1204   }
1205   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1206   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1207   ierr = PetscFree4(vtxdist,tpwgts,ubvec,assignment);CHKERRQ(ierr);
1208 #else
1209   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1210 #endif
1211   PetscFunctionReturn(0);
1212 }
1213 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerInitialize_ParMetis"
/* Fill the function table of a ParMetis partitioner with its type-specific implementations */
PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view      = PetscPartitionerView_ParMetis;
  part->ops->destroy   = PetscPartitionerDestroy_ParMetis;
  part->ops->partition = PetscPartitionerPartition_ParMetis;
  PetscFunctionReturn(0);
}
1224 
1225 /*MC
1226   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1227 
1228   Level: intermediate
1229 
1230 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1231 M*/
1232 
1233 #undef __FUNCT__
1234 #define __FUNCT__ "PetscPartitionerCreate_ParMetis"
1235 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1236 {
1237   PetscPartitioner_ParMetis *p;
1238   PetscErrorCode          ierr;
1239 
1240   PetscFunctionBegin;
1241   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1242   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1243   part->data = p;
1244 
1245   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1246   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1247   PetscFunctionReturn(0);
1248 }
1249 
1250 #undef __FUNCT__
1251 #define __FUNCT__ "DMPlexGetPartitioner"
1252 /*@
1253   DMPlexGetPartitioner - Get the mesh partitioner
1254 
1255   Not collective
1256 
1257   Input Parameter:
1258 . dm - The DM
1259 
1260   Output Parameter:
1261 . part - The PetscPartitioner
1262 
1263   Level: developer
1264 
1265 .seealso DMPlexDistribute(), PetscPartitionerCreate()
1266 @*/
1267 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1268 {
1269   DM_Plex *mesh = (DM_Plex *) dm->data;
1270 
1271   PetscFunctionBegin;
1272   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1273   PetscValidPointer(part, 2);
1274   *part = mesh->partitioner;
1275   PetscFunctionReturn(0);
1276 }
1277 
#undef __FUNCT__
#define __FUNCT__ "DMPlexMarkTreeClosure"
/*
  Recursively mark the transitive closure of the tree parent of 'point' in the bitmap
  bt, appending each newly marked point to segpart and incrementing *partSize for it.
  Points whose tree parent is themselves (no parent) terminate the recursion.
*/
static PetscErrorCode DMPlexMarkTreeClosure(DM dm, PetscSegBuffer segpart, PetscBT bt, PetscInt point, PetscInt *partSize)
{
  PetscInt       parent, closureSize, *closure = NULL, i, pStart, pEnd;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
  if (parent == point) PetscFunctionReturn(0);
  ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr);
  ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
  for (i = 0; i < closureSize; i++) {
    PetscInt cpoint = closure[2*i]; /* closure stores (point, orientation) pairs */

    /* PetscBTLookupSet returns the previous bit, so the body runs once per point */
    if (!PetscBTLookupSet(bt,cpoint-pStart)) {
      PetscInt *PETSC_RESTRICT pt;
      (*partSize)++;
      ierr = PetscSegBufferGetInts(segpart,1,&pt);CHKERRQ(ierr);
      *pt = cpoint;
    }
    /* Recurse: the closure point may itself have a tree parent to include */
    ierr = DMPlexMarkTreeClosure(dm,segpart,bt,cpoint,partSize);CHKERRQ(ierr);
  }
  ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1304 
#undef __FUNCT__
#define __FUNCT__ "DMPlexCreatePartitionClosure"
/*
  Expand a partition of points into a partition of their transitive closures
  (including tree-parent closures), producing a new section and IS over the same
  rank chart. Each rank's point set is deduplicated with a bitmap and sorted.
*/
PetscErrorCode DMPlexCreatePartitionClosure(DM dm, PetscSection pointSection, IS pointPartition, PetscSection *section, IS *partition)
{
  /* const PetscInt  height = 0; */
  const PetscInt *partArray;
  PetscInt       *allPoints, *packPoints;
  PetscInt        rStart, rEnd, rank, pStart, pEnd, newSize;
  PetscErrorCode  ierr;
  PetscBT         bt;
  PetscSegBuffer  segpack,segpart;

  PetscFunctionBegin;
  ierr = PetscSectionGetChart(pointSection, &rStart, &rEnd);CHKERRQ(ierr);
  ierr = ISGetIndices(pointPartition, &partArray);CHKERRQ(ierr);
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dm), section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(*section, rStart, rEnd);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr);
  /* bt marks points already collected for the current rank; segpart holds the
     current rank's points, segpack accumulates all ranks' points in order */
  ierr = PetscBTCreate(pEnd-pStart,&bt);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&segpack);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&segpart);CHKERRQ(ierr);
  for (rank = rStart; rank < rEnd; ++rank) {
    PetscInt partSize = 0, numPoints, offset, p, *PETSC_RESTRICT placePoints;

    ierr = PetscSectionGetDof(pointSection, rank, &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(pointSection, rank, &offset);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      PetscInt  point   = partArray[offset+p], closureSize, c;
      PetscInt *closure = NULL;

      /* TODO Include support for height > 0 case */
      ierr = DMPlexGetTransitiveClosure(dm, point, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      for (c=0; c<closureSize; c++) {
        PetscInt cpoint = closure[c*2]; /* closure stores (point, orientation) pairs */
        if (!PetscBTLookupSet(bt,cpoint-pStart)) {
          PetscInt *PETSC_RESTRICT pt;
          partSize++;
          ierr = PetscSegBufferGetInts(segpart,1,&pt);CHKERRQ(ierr);
          *pt = cpoint;
        }
        /* Also pull in the closure of any tree parent of this point */
        ierr = DMPlexMarkTreeClosure(dm,segpart,bt,cpoint,&partSize);CHKERRQ(ierr);
      }
      ierr = DMPlexRestoreTransitiveClosure(dm, point, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
    }
    ierr = PetscSectionSetDof(*section, rank, partSize);CHKERRQ(ierr);
    /* Move this rank's (sorted) points into the packed buffer and clear the bitmap
       so the next rank starts fresh */
    ierr = PetscSegBufferGetInts(segpack,partSize,&placePoints);CHKERRQ(ierr);
    ierr = PetscSegBufferExtractTo(segpart,placePoints);CHKERRQ(ierr);
    ierr = PetscSortInt(partSize,placePoints);CHKERRQ(ierr);
    for (p=0; p<partSize; p++) {ierr = PetscBTClear(bt,placePoints[p]-pStart);CHKERRQ(ierr);}
  }
  ierr = PetscBTDestroy(&bt);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&segpart);CHKERRQ(ierr);

  ierr = PetscSectionSetUp(*section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(*section, &newSize);CHKERRQ(ierr);
  ierr = PetscMalloc1(newSize, &allPoints);CHKERRQ(ierr);

  /* Copy the packed per-rank point lists into a single array ordered by the
     offsets of the new section */
  ierr = PetscSegBufferExtractInPlace(segpack,&packPoints);CHKERRQ(ierr);
  for (rank = rStart; rank < rEnd; ++rank) {
    PetscInt numPoints, offset;

    ierr = PetscSectionGetDof(*section, rank, &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(*section, rank, &offset);CHKERRQ(ierr);
    ierr = PetscMemcpy(&allPoints[offset], packPoints, numPoints * sizeof(PetscInt));CHKERRQ(ierr);
    packPoints += numPoints;
  }

  ierr = PetscSegBufferDestroy(&segpack);CHKERRQ(ierr);
  ierr = ISRestoreIndices(pointPartition, &partArray);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), newSize, allPoints, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1377 
1378 #undef __FUNCT__
1379 #define __FUNCT__ "DMPlexPartitionLabelClosure"
1380 /*@
1381   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
1382 
1383   Input Parameters:
1384 + dm     - The DM
1385 - label  - DMLabel assinging ranks to remote roots
1386 
1387   Level: developer
1388 
1389 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1390 @*/
1391 PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
1392 {
1393   IS              rankIS,   pointIS;
1394   const PetscInt *ranks,   *points;
1395   PetscInt        numRanks, numPoints, r, p, c, closureSize;
1396   PetscInt       *closure = NULL;
1397   PetscErrorCode  ierr;
1398 
1399   PetscFunctionBegin;
1400   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1401   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1402   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1403   for (r = 0; r < numRanks; ++r) {
1404     const PetscInt rank = ranks[r];
1405 
1406     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1407     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1408     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1409     for (p = 0; p < numPoints; ++p) {
1410       ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
1411       for (c = 0; c < closureSize*2; c += 2) {ierr = DMLabelSetValue(label, closure[c], rank);CHKERRQ(ierr);}
1412     }
1413     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1414     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1415   }
1416   if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);}
1417   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1418   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1419   PetscFunctionReturn(0);
1420 }
1421 
1422 #undef __FUNCT__
1423 #define __FUNCT__ "DMPlexPartitionLabelAdjacency"
1424 /*@
1425   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
1426 
1427   Input Parameters:
1428 + dm     - The DM
1429 - label  - DMLabel assinging ranks to remote roots
1430 
1431   Level: developer
1432 
1433 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1434 @*/
1435 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
1436 {
1437   IS              rankIS,   pointIS;
1438   const PetscInt *ranks,   *points;
1439   PetscInt        numRanks, numPoints, r, p, a, adjSize;
1440   PetscInt       *adj = NULL;
1441   PetscErrorCode  ierr;
1442 
1443   PetscFunctionBegin;
1444   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1445   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1446   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1447   for (r = 0; r < numRanks; ++r) {
1448     const PetscInt rank = ranks[r];
1449 
1450     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1451     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1452     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1453     for (p = 0; p < numPoints; ++p) {
1454       adjSize = PETSC_DETERMINE;
1455       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
1456       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
1457     }
1458     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1459     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1460   }
1461   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1462   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1463   ierr = PetscFree(adj);CHKERRQ(ierr);
1464   PetscFunctionReturn(0);
1465 }
1466 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelInvert"
/*@
  DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label

  Input Parameters:
+ dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots

  Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
  resulting leafLabel is a receiver mapping of remote roots to their parent rank.

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, numProcs;
  PetscInt           p, n, numNeighbors, size, l, nleaves;
  PetscSF            sfPoint;
  PetscSFNode       *rootPoints, *leafPoints;
  PetscSection       rootSection, leafSection;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners */
  /* rootSection is indexed by target rank; its dof count per rank is the number of
     local points that rootLabel assigns to that rank */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, numProcs);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(size, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* If the point is a leaf in the point SF, record its true owner (root);
         otherwise the point is owned locally and we record (point, rank).
         PetscFindInt requires 'local' to be sorted; a NULL 'local' means the
         leaves are contiguous, in which case no point here can be a remote leaf. */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap: each (point, rank) pair travels as MPIU_2INT, matching the
     two-PetscInt layout of PetscSFNode */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Insert the received (point, rank) pairs into leafLabel: point -> sending rank */
  ierr = PetscSectionGetStorageSize(leafSection, &size);CHKERRQ(ierr);
  for (p = 0; p < size; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1556 
1557 #undef __FUNCT__
1558 #define __FUNCT__ "DMPlexPartitionLabelCreateSF"
1559 /*@
1560   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
1561 
1562   Input Parameters:
1563 + dm    - The DM
1564 . label - DMLabel assinging ranks to remote roots
1565 
1566   Output Parameter:
1567 - sf    - The star forest communication context encapsulating the defined mapping
1568 
1569   Note: The incoming label is a receiver mapping of remote points to their parent rank.
1570 
1571   Level: developer
1572 
1573 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
1574 @*/
1575 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
1576 {
1577   PetscMPIInt    numProcs;
1578   PetscInt       n, idx, numRemote, p, numPoints, pStart, pEnd;
1579   PetscSFNode   *remotePoints;
1580   PetscErrorCode ierr;
1581 
1582   PetscFunctionBegin;
1583   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
1584 
1585   for (numRemote = 0, n = 0; n < numProcs; ++n) {
1586     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1587     numRemote += numPoints;
1588   }
1589   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
1590   for (idx = 0, n = 0; n < numProcs; ++n) {
1591     IS remoteRootIS;
1592     const PetscInt *remoteRoots;
1593     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1594     if (numPoints <= 0) continue;
1595     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
1596     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1597     for (p = 0; p < numPoints; p++) {
1598       remotePoints[idx].index = remoteRoots[p];
1599       remotePoints[idx].rank = n;
1600       idx++;
1601     }
1602     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1603     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1604   }
1605   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
1606   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1607   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1608   PetscFunctionReturn(0);
1609 }
1610