xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 43f7d02bd9930d2dbb5d97d38e00dd4c722f7160)
1 #include <petsc-private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
3 PetscClassId PETSCPARTITIONER_CLASSID = 0;
4 
5 PetscFunctionList PetscPartitionerList              = NULL;
6 PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
7 
8 PetscBool ChacoPartitionercite = PETSC_FALSE;
9 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
10                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
11                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
12                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
13                                "  isbn      = {0-89791-816-9},\n"
14                                "  pages     = {28},\n"
15                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
16                                "  publisher = {ACM Press},\n"
17                                "  address   = {New York},\n"
18                                "  year      = {1995}\n}\n";
19 
20 PetscBool ParMetisPartitionercite = PETSC_FALSE;
21 const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
22                                "  author  = {George Karypis and Vipin Kumar},\n"
23                                "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
24                                "  journal = {Journal of Parallel and Distributed Computing},\n"
25                                "  volume  = {48},\n"
26                                "  pages   = {71--85},\n"
27                                "  year    = {1998}\n}\n";
28 
29 #undef __FUNCT__
30 #define __FUNCT__ "DMPlexCreatePartitionerGraph"
31 /*@C
32   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
33 
34   Input Parameters:
+ dm      - The mesh DM
36 - height  - Height of the strata from which to construct the graph
37 
  Output Parameters:
+ numVertices - Number of vertices in the graph
. offsets     - Point offsets in the graph
- adjacency   - Point connectivity in the graph
42 
  The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
  DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
  representation on the mesh.
46 
47   Level: developer
48 
49 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
50 @*/
51 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
52 {
53   PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
54   PetscInt      *adj = NULL, *vOffsets = NULL;
55   IS             cellNumbering;
56   const PetscInt *cellNum;
57   PetscBool      useCone, useClosure;
58   PetscSection   section;
59   PetscSegBuffer adjBuffer;
60   PetscSF        sfPoint;
61   PetscMPIInt    rank;
62   PetscErrorCode ierr;
63 
64   PetscFunctionBegin;
65   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
66   ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
67   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
68   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
69   /* Build adjacency graph via a section/segbuffer */
70   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
71   ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
72   ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
73   /* Always use FVM adjacency to create partitioner graph */
74   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
75   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
76   ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
77   ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
78   if (nroots > 0) {
79     ierr = DMPlexGetCellNumbering(dm, &cellNumbering);CHKERRQ(ierr);
80     ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
81   }
82   for (*numVertices = 0, p = pStart; p < pEnd; p++) {
83     /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
84     if (nroots > 0) {if (cellNum[p] < 0) continue;}
85     adjSize = PETSC_DETERMINE;
86     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
87     for (a = 0; a < adjSize; ++a) {
88       const PetscInt point = adj[a];
89       if (point != p && pStart <= point && point < pEnd) {
90         PetscInt *PETSC_RESTRICT pBuf;
91         ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
92         ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
93         *pBuf = point;
94       }
95     }
96     (*numVertices)++;
97   }
98   ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
99   ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
100   /* Derive CSR graph from section/segbuffer */
101   ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
102   ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
103   ierr = PetscMalloc2(*numVertices+1, &vOffsets, size, adjacency);CHKERRQ(ierr);
104   for (idx = 0, p = pStart; p < pEnd; p++) {
105     if (nroots > 0) {if (cellNum[p] < 0) continue;}
106     ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
107   }
108   vOffsets[*numVertices] = size;
109   if (offsets) *offsets = vOffsets;
110   if (adjacency) {
111     ierr = PetscSegBufferExtractTo(adjBuffer, *adjacency);CHKERRQ(ierr);
112     if (nroots > 0) {
113       ISLocalToGlobalMapping ltogCells;
114       PetscInt n, size, *cells_arr;
115       /* In parallel, apply a global cell numbering to the graph */
116       ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
117       ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
118       ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
119       ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
120       /* Convert to positive global cell numbers */
121       for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
122       ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
123       ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], *adjacency, *adjacency);CHKERRQ(ierr);
124       ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
125     }
126   }
127   /* Clean up */
128   ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
129   ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
130   ierr = PetscFree(adj);CHKERRQ(ierr);
131   PetscFunctionReturn(0);
132 }
133 
134 #undef __FUNCT__
135 #define __FUNCT__ "DMPlexCreateNeighborCSR"
136 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
137 {
138   const PetscInt maxFaceCases = 30;
139   PetscInt       numFaceCases = 0;
140   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
141   PetscInt      *off, *adj;
142   PetscInt      *neighborCells = NULL;
143   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
144   PetscErrorCode ierr;
145 
146   PetscFunctionBegin;
147   /* For parallel partitioning, I think you have to communicate supports */
148   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
149   cellDim = dim - cellHeight;
150   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
151   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
152   if (cEnd - cStart == 0) {
153     if (numVertices) *numVertices = 0;
154     if (offsets)   *offsets   = NULL;
155     if (adjacency) *adjacency = NULL;
156     PetscFunctionReturn(0);
157   }
158   numCells  = cEnd - cStart;
159   faceDepth = depth - cellHeight;
160   if (dim == depth) {
161     PetscInt f, fStart, fEnd;
162 
163     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
164     /* Count neighboring cells */
165     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
166     for (f = fStart; f < fEnd; ++f) {
167       const PetscInt *support;
168       PetscInt        supportSize;
169       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
170       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
171       if (supportSize == 2) {
172         PetscInt numChildren;
173 
174         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
175         if (!numChildren) {
176           ++off[support[0]-cStart+1];
177           ++off[support[1]-cStart+1];
178         }
179       }
180     }
181     /* Prefix sum */
182     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
183     if (adjacency) {
184       PetscInt *tmp;
185 
186       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
187       ierr = PetscMalloc1((numCells+1), &tmp);CHKERRQ(ierr);
188       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
189       /* Get neighboring cells */
190       for (f = fStart; f < fEnd; ++f) {
191         const PetscInt *support;
192         PetscInt        supportSize;
193         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
194         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
195         if (supportSize == 2) {
196           PetscInt numChildren;
197 
198           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
199           if (!numChildren) {
200             adj[tmp[support[0]-cStart]++] = support[1];
201             adj[tmp[support[1]-cStart]++] = support[0];
202           }
203         }
204       }
205 #if defined(PETSC_USE_DEBUG)
206       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
207 #endif
208       ierr = PetscFree(tmp);CHKERRQ(ierr);
209     }
210     if (numVertices) *numVertices = numCells;
211     if (offsets)   *offsets   = off;
212     if (adjacency) *adjacency = adj;
213     PetscFunctionReturn(0);
214   }
215   /* Setup face recognition */
216   if (faceDepth == 1) {
217     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
218 
219     for (c = cStart; c < cEnd; ++c) {
220       PetscInt corners;
221 
222       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
223       if (!cornersSeen[corners]) {
224         PetscInt nFV;
225 
226         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
227         cornersSeen[corners] = 1;
228 
229         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
230 
231         numFaceVertices[numFaceCases++] = nFV;
232       }
233     }
234   }
235   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
236   /* Count neighboring cells */
237   for (cell = cStart; cell < cEnd; ++cell) {
238     PetscInt numNeighbors = PETSC_DETERMINE, n;
239 
240     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
241     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
242     for (n = 0; n < numNeighbors; ++n) {
243       PetscInt        cellPair[2];
244       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
245       PetscInt        meetSize = 0;
246       const PetscInt *meet    = NULL;
247 
248       cellPair[0] = cell; cellPair[1] = neighborCells[n];
249       if (cellPair[0] == cellPair[1]) continue;
250       if (!found) {
251         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
252         if (meetSize) {
253           PetscInt f;
254 
255           for (f = 0; f < numFaceCases; ++f) {
256             if (numFaceVertices[f] == meetSize) {
257               found = PETSC_TRUE;
258               break;
259             }
260           }
261         }
262         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
263       }
264       if (found) ++off[cell-cStart+1];
265     }
266   }
267   /* Prefix sum */
268   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
269 
270   if (adjacency) {
271     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
272     /* Get neighboring cells */
273     for (cell = cStart; cell < cEnd; ++cell) {
274       PetscInt numNeighbors = PETSC_DETERMINE, n;
275       PetscInt cellOffset   = 0;
276 
277       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
278       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
279       for (n = 0; n < numNeighbors; ++n) {
280         PetscInt        cellPair[2];
281         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
282         PetscInt        meetSize = 0;
283         const PetscInt *meet    = NULL;
284 
285         cellPair[0] = cell; cellPair[1] = neighborCells[n];
286         if (cellPair[0] == cellPair[1]) continue;
287         if (!found) {
288           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
289           if (meetSize) {
290             PetscInt f;
291 
292             for (f = 0; f < numFaceCases; ++f) {
293               if (numFaceVertices[f] == meetSize) {
294                 found = PETSC_TRUE;
295                 break;
296               }
297             }
298           }
299           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
300         }
301         if (found) {
302           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
303           ++cellOffset;
304         }
305       }
306     }
307   }
308   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
309   if (numVertices) *numVertices = numCells;
310   if (offsets)   *offsets   = off;
311   if (adjacency) *adjacency = adj;
312   PetscFunctionReturn(0);
313 }
314 
315 #undef __FUNCT__
316 #define __FUNCT__ "DMPlexEnlargePartition"
/* Expand the partition by BFS on the adjacency graph:
   each partition keeps its original points and additionally gains every point
   adjacent to one of them (one breadth-first-search level).

   NOTE(review): the 'start' and 'adjacency' CSR parameters are never referenced
   by this implementation — adjacency is recomputed via DMPlexGetAdjacency();
   confirm whether they can be removed from the interface. */
PetscErrorCode DMPlexEnlargePartition(DM dm, const PetscInt start[], const PetscInt adjacency[], PetscSection origPartSection, IS origPartition, PetscSection partSection, IS *partition)
{
  PetscHashI      h;            /* hash set holding the (unique) points of the current partition */
  const PetscInt *points;
  PetscInt      **tmpPoints, *newPoints, totPoints = 0;
  PetscInt        pStart, pEnd, part, q;
  PetscBool       useCone;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscHashICreate(h);
  ierr = PetscSectionGetChart(origPartSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, pStart, pEnd);CHKERRQ(ierr);
  ierr = ISGetIndices(origPartition, &points);CHKERRQ(ierr);
  /* NOTE(review): tmpPoints has pEnd-pStart slots but is indexed by 'part' below,
     which assumes pStart == 0 — confirm against callers */
  ierr = PetscMalloc1((pEnd - pStart), &tmpPoints);CHKERRQ(ierr);
  /* Temporarily switch to cone-based adjacency for the BFS; restored afterwards */
  ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
  for (part = pStart; part < pEnd; ++part) {
    PetscInt *adj = NULL;
    PetscInt  numPoints, nP, numNewPoints, off, p, n = 0;

    PetscHashIClear(h);
    ierr = PetscSectionGetDof(origPartSection, part, &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(origPartSection, part, &off);CHKERRQ(ierr);
    /* Add all existing points to h */
    for (p = 0; p < numPoints; ++p) {
      const PetscInt point = points[off+p];
      PetscHashIAdd(h, point, 1);
    }
    /* A valid partition has no duplicate points */
    PetscHashISize(h, nP);
    if (nP != numPoints) SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONG, "Invalid partition has %d points, but only %d were unique", numPoints, nP);
    /* Add all points in next BFS level */
    for (p = 0; p < numPoints; ++p) {
      const PetscInt point   = points[off+p];
      PetscInt       adjSize = PETSC_DETERMINE, a;

      ierr = DMPlexGetAdjacency(dm, point, &adjSize, &adj);CHKERRQ(ierr);
      for (a = 0; a < adjSize; ++a) PetscHashIAdd(h, adj[a], 1);
    }
    /* The hash now holds the enlarged point set for this partition */
    PetscHashISize(h, numNewPoints);
    ierr = PetscSectionSetDof(partSection, part, numNewPoints);CHKERRQ(ierr);
    ierr = PetscMalloc1(numNewPoints, &tmpPoints[part]);CHKERRQ(ierr);
    ierr = PetscHashIGetKeys(h, &n, tmpPoints[part]);CHKERRQ(ierr);
    ierr = PetscFree(adj);CHKERRQ(ierr);
    totPoints += numNewPoints;
  }
  ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
  ierr = ISRestoreIndices(origPartition, &points);CHKERRQ(ierr);
  PetscHashIDestroy(h);
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* Flatten per-partition lists into one array; ownership passes to the output IS */
  ierr = PetscMalloc1(totPoints, &newPoints);CHKERRQ(ierr);
  for (part = pStart, q = 0; part < pEnd; ++part) {
    PetscInt numPoints, p;

    ierr = PetscSectionGetDof(partSection, part, &numPoints);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p, ++q) newPoints[q] = tmpPoints[part][p];
    ierr = PetscFree(tmpPoints[part]);CHKERRQ(ierr);
  }
  ierr = PetscFree(tmpPoints);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), totPoints, newPoints, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
380 
381 #undef __FUNCT__
382 #define __FUNCT__ "PetscPartitionerRegister"
383 /*@C
384   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
385 
386   Not Collective
387 
388   Input Parameters:
389 + name        - The name of a new user-defined creation routine
390 - create_func - The creation routine itself
391 
392   Notes:
393   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
394 
395   Sample usage:
396 .vb
397     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
398 .ve
399 
400   Then, your PetscPartitioner type can be chosen with the procedural interface via
401 .vb
402     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
403     PetscPartitionerSetType(PetscPartitioner, "my_part");
404 .ve
405    or at runtime via the option
406 .vb
407     -petscpartitioner_type my_part
408 .ve
409 
410   Level: advanced
411 
412 .keywords: PetscPartitioner, register
413 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
414 
415 @*/
416 PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
417 {
418   PetscErrorCode ierr;
419 
420   PetscFunctionBegin;
421   ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
422   PetscFunctionReturn(0);
423 }
424 
425 #undef __FUNCT__
426 #define __FUNCT__ "PetscPartitionerSetType"
427 /*@C
428   PetscPartitionerSetType - Builds a particular PetscPartitioner
429 
430   Collective on PetscPartitioner
431 
432   Input Parameters:
433 + part - The PetscPartitioner object
434 - name - The kind of partitioner
435 
436   Options Database Key:
437 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
438 
439   Level: intermediate
440 
441 .keywords: PetscPartitioner, set, type
442 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
443 @*/
444 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
445 {
446   PetscErrorCode (*r)(PetscPartitioner);
447   PetscBool      match;
448   PetscErrorCode ierr;
449 
450   PetscFunctionBegin;
451   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
452   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
453   if (match) PetscFunctionReturn(0);
454 
455   if (!PetscPartitionerRegisterAllCalled) {ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);}
456   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
457   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
458 
459   if (part->ops->destroy) {
460     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
461     part->ops->destroy = NULL;
462   }
463   ierr = (*r)(part);CHKERRQ(ierr);
464   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
465   PetscFunctionReturn(0);
466 }
467 
468 #undef __FUNCT__
469 #define __FUNCT__ "PetscPartitionerGetType"
470 /*@C
471   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
472 
473   Not Collective
474 
475   Input Parameter:
476 . part - The PetscPartitioner
477 
478   Output Parameter:
479 . name - The PetscPartitioner type name
480 
481   Level: intermediate
482 
483 .keywords: PetscPartitioner, get, type, name
484 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
485 @*/
486 PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
487 {
488   PetscErrorCode ierr;
489 
490   PetscFunctionBegin;
491   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
492   PetscValidPointer(name, 2);
493   if (!PetscPartitionerRegisterAllCalled) {ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);}
494   *name = ((PetscObject) part)->type_name;
495   PetscFunctionReturn(0);
496 }
497 
498 #undef __FUNCT__
499 #define __FUNCT__ "PetscPartitionerView"
500 /*@C
501   PetscPartitionerView - Views a PetscPartitioner
502 
503   Collective on PetscPartitioner
504 
  Input Parameters:
+ part - the PetscPartitioner object to view
- v    - the viewer
508 
509   Level: developer
510 
511 .seealso: PetscPartitionerDestroy()
512 @*/
513 PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
514 {
515   PetscErrorCode ierr;
516 
517   PetscFunctionBegin;
518   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
519   if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
520   if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
521   PetscFunctionReturn(0);
522 }
523 
524 #undef __FUNCT__
525 #define __FUNCT__ "PetscPartitionerViewFromOptions"
526 /*
527   PetscPartitionerViewFromOptions - Processes command line options to determine if/how a PetscPartitioner is to be viewed.
528 
529   Collective on PetscPartitioner
530 
531   Input Parameters:
532 + part   - the PetscPartitioner
533 . prefix - prefix to use for viewing, or NULL
534 - optionname - option to activate viewing
535 
536   Level: intermediate
537 
538 .keywords: PetscPartitioner, view, options, database
539 .seealso: VecViewFromOptions(), MatViewFromOptions()
540 */
541 PetscErrorCode PetscPartitionerViewFromOptions(PetscPartitioner part, const char prefix[], const char optionname[])
542 {
543   PetscViewer       viewer;
544   PetscViewerFormat format;
545   PetscBool         flg;
546   PetscErrorCode    ierr;
547 
548   PetscFunctionBegin;
549   if (prefix) {ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject) part), prefix,                       optionname, &viewer, &format, &flg);CHKERRQ(ierr);}
550   else        {ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject) part), ((PetscObject) part)->prefix, optionname, &viewer, &format, &flg);CHKERRQ(ierr);}
551   if (flg) {
552     ierr = PetscViewerPushFormat(viewer, format);CHKERRQ(ierr);
553     ierr = PetscPartitionerView(part, viewer);CHKERRQ(ierr);
554     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
555     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
556   }
557   PetscFunctionReturn(0);
558 }
559 #undef __FUNCT__
560 #define __FUNCT__ "PetscPartitionerSetTypeFromOptions_Internal"
561 PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
562 {
563   const char    *defaultType;
564   char           name[256];
565   PetscBool      flg;
566   PetscErrorCode ierr;
567 
568   PetscFunctionBegin;
569   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
570   if (!((PetscObject) part)->type_name) defaultType = PETSCPARTITIONERCHACO;
571   else                                  defaultType = ((PetscObject) part)->type_name;
572   if (!PetscPartitionerRegisterAllCalled) {ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);}
573 
574   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
575   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);CHKERRQ(ierr);
576   if (flg) {
577     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
578   } else if (!((PetscObject) part)->type_name) {
579     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
580   }
581   ierr = PetscOptionsEnd();CHKERRQ(ierr);
582   PetscFunctionReturn(0);
583 }
584 
585 #undef __FUNCT__
586 #define __FUNCT__ "PetscPartitionerSetFromOptions"
587 /*@
588   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
589 
590   Collective on PetscPartitioner
591 
592   Input Parameter:
593 . part - the PetscPartitioner object to set options for
594 
595   Level: developer
596 
597 .seealso: PetscPartitionerView()
598 @*/
599 PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
600 {
601   PetscErrorCode ierr;
602 
603   PetscFunctionBegin;
604   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
605   ierr = PetscPartitionerSetTypeFromOptions_Internal(part);CHKERRQ(ierr);
606 
607   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
608   if (part->ops->setfromoptions) {ierr = (*part->ops->setfromoptions)(part);CHKERRQ(ierr);}
609   /* process any options handlers added with PetscObjectAddOptionsHandler() */
610   ierr = PetscObjectProcessOptionsHandlers((PetscObject) part);CHKERRQ(ierr);
611   ierr = PetscOptionsEnd();CHKERRQ(ierr);
612   ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
613   PetscFunctionReturn(0);
614 }
615 
616 #undef __FUNCT__
617 #define __FUNCT__ "PetscPartitionerSetUp"
618 /*@C
619   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
620 
621   Collective on PetscPartitioner
622 
623   Input Parameter:
624 . part - the PetscPartitioner object to setup
625 
626   Level: developer
627 
628 .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
629 @*/
630 PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
631 {
632   PetscErrorCode ierr;
633 
634   PetscFunctionBegin;
635   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
636   if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
637   PetscFunctionReturn(0);
638 }
639 
640 #undef __FUNCT__
641 #define __FUNCT__ "PetscPartitionerDestroy"
642 /*@
643   PetscPartitionerDestroy - Destroys a PetscPartitioner object
644 
645   Collective on PetscPartitioner
646 
647   Input Parameter:
648 . part - the PetscPartitioner object to destroy
649 
650   Level: developer
651 
652 .seealso: PetscPartitionerView()
653 @*/
654 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
655 {
656   PetscErrorCode ierr;
657 
658   PetscFunctionBegin;
659   if (!*part) PetscFunctionReturn(0);
660   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
661 
662   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
663   ((PetscObject) (*part))->refct = 0;
664 
665   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
666   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
667   PetscFunctionReturn(0);
668 }
669 
670 #undef __FUNCT__
671 #define __FUNCT__ "PetscPartitionerCreate"
672 /*@
673   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
674 
675   Collective on MPI_Comm
676 
677   Input Parameter:
678 . comm - The communicator for the PetscPartitioner object
679 
680   Output Parameter:
681 . part - The PetscPartitioner object
682 
683   Level: beginner
684 
685 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL
686 @*/
687 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
688 {
689   PetscPartitioner p;
690   PetscErrorCode   ierr;
691 
692   PetscFunctionBegin;
693   PetscValidPointer(part, 2);
694   *part = NULL;
695   ierr = PetscFVInitializePackage();CHKERRQ(ierr);
696 
697   ierr = PetscHeaderCreate(p, _p_PetscPartitioner, struct _PetscPartitionerOps, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
698   ierr = PetscMemzero(p->ops, sizeof(struct _PetscPartitionerOps));CHKERRQ(ierr);
699 
700   *part = p;
701   PetscFunctionReturn(0);
702 }
703 
704 #undef __FUNCT__
705 #define __FUNCT__ "PetscPartitionerPartition"
706 /*@
707   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
708 
709   Collective on DM
710 
711   Input Parameters:
712 + part    - The PetscPartitioner
713 - dm      - The mesh DM
714 
715   Output Parameters:
716 + partSection     - The PetscSection giving the division of points by partition
717 - partition       - The list of points by partition
718 
719   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
720 
721   Level: developer
722 
723 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
724 @*/
725 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
726 {
727   PetscMPIInt    size;
728   PetscErrorCode ierr;
729 
730   PetscFunctionBegin;
731   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
732   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
733   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
734   PetscValidPointer(partition, 5);
735   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
736   if (size == 1) {
737     PetscInt *points;
738     PetscInt  cStart, cEnd, c;
739 
740     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
741     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
742     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
743     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
744     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
745     for (c = cStart; c < cEnd; ++c) points[c] = c;
746     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
747     PetscFunctionReturn(0);
748   }
749   if (part->height == 0) {
750     PetscInt  numVertices;
751     PetscInt *start     = NULL;
752     PetscInt *adjacency = NULL;
753 
754     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency);CHKERRQ(ierr);
755     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
756     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
757     ierr = PetscFree(start);CHKERRQ(ierr);
758     ierr = PetscFree(adjacency);CHKERRQ(ierr);
759   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
760   PetscFunctionReturn(0);
761 }
762 
763 #undef __FUNCT__
764 #define __FUNCT__ "PetscPartitionerDestroy_Shell"
765 PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
766 {
767   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
768   PetscErrorCode          ierr;
769 
770   PetscFunctionBegin;
771   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
772   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
773   ierr = PetscFree(p);CHKERRQ(ierr);
774   PetscFunctionReturn(0);
775 }
776 
777 #undef __FUNCT__
778 #define __FUNCT__ "PetscPartitionerView_Shell_Ascii"
779 PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
780 {
781   PetscViewerFormat format;
782   PetscErrorCode    ierr;
783 
784   PetscFunctionBegin;
785   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
786   ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
787   PetscFunctionReturn(0);
788 }
789 
790 #undef __FUNCT__
791 #define __FUNCT__ "PetscPartitionerView_Shell"
792 PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
793 {
794   PetscBool      iascii;
795   PetscErrorCode ierr;
796 
797   PetscFunctionBegin;
798   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
799   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
800   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
801   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
802   PetscFunctionReturn(0);
803 }
804 
805 #undef __FUNCT__
806 #define __FUNCT__ "PetscPartitionerPartition_Shell"
807 PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
808 {
809   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
810   PetscInt                np;
811   PetscErrorCode          ierr;
812 
813   PetscFunctionBegin;
814   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
815   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
816   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
817   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
818   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
819   *partition = p->partition;
820   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
821   PetscFunctionReturn(0);
822 }
823 
824 #undef __FUNCT__
825 #define __FUNCT__ "PetscPartitionerInitialize_Shell"
826 PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
827 {
828   PetscFunctionBegin;
829   part->ops->view      = PetscPartitionerView_Shell;
830   part->ops->destroy   = PetscPartitionerDestroy_Shell;
831   part->ops->partition = PetscPartitionerPartition_Shell;
832   PetscFunctionReturn(0);
833 }
834 
835 /*MC
836   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
837 
838   Level: intermediate
839 
840 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
841 M*/
842 
843 #undef __FUNCT__
844 #define __FUNCT__ "PetscPartitionerCreate_Shell"
845 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
846 {
847   PetscPartitioner_Shell *p;
848   PetscErrorCode          ierr;
849 
850   PetscFunctionBegin;
851   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
852   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
853   part->data = p;
854 
855   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
856   PetscFunctionReturn(0);
857 }
858 
859 #undef __FUNCT__
860 #define __FUNCT__ "PetscPartitionerShellSetPartition"
861 /*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
863 
864   Collective on DM
865 
  Input Parameters:
+ part     - The PetscPartitioner
. numProcs - The number of partitions
. sizes    - array of length numProcs (or NULL) providing the number of points in each partition
- points   - array of length sum(sizes) (may be NULL iff sizes is NULL) providing the partition each point belongs to
870 
871   Level: developer
872 
873 .seealso DMPlexDistribute(), PetscPartitionerCreate()
874 @*/
875 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt numProcs, const PetscInt sizes[], const PetscInt points[])
876 {
877   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
878   PetscInt                proc, numPoints;
879   PetscErrorCode          ierr;
880 
881   PetscFunctionBegin;
882   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
883   if (sizes) {PetscValidPointer(sizes, 3);}
884   if (sizes) {PetscValidPointer(points, 4);}
885   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
886   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
887   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
888   ierr = PetscSectionSetChart(p->section, 0, numProcs);CHKERRQ(ierr);
889   if (sizes) {
890     for (proc = 0; proc < numProcs; ++proc) {
891       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
892     }
893   }
894   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
895   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
896   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
897   PetscFunctionReturn(0);
898 }
899 
900 #undef __FUNCT__
901 #define __FUNCT__ "PetscPartitionerDestroy_Chaco"
902 PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
903 {
904   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
905   PetscErrorCode          ierr;
906 
907   PetscFunctionBegin;
908   ierr = PetscFree(p);CHKERRQ(ierr);
909   PetscFunctionReturn(0);
910 }
911 
912 #undef __FUNCT__
913 #define __FUNCT__ "PetscPartitionerView_Chaco_Ascii"
914 PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
915 {
916   PetscViewerFormat format;
917   PetscErrorCode    ierr;
918 
919   PetscFunctionBegin;
920   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
921   ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
922   PetscFunctionReturn(0);
923 }
924 
925 #undef __FUNCT__
926 #define __FUNCT__ "PetscPartitionerView_Chaco"
927 PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
928 {
929   PetscBool      iascii;
930   PetscErrorCode ierr;
931 
932   PetscFunctionBegin;
933   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
934   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
935   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
936   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
937   PetscFunctionReturn(0);
938 }
939 
940 #if defined(PETSC_HAVE_CHACO)
941 #if defined(PETSC_HAVE_UNISTD_H)
942 #include <unistd.h>
943 #endif
944 /* Chaco does not have an include file */
945 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
946                        float *ewgts, float *x, float *y, float *z, char *outassignname,
947                        char *outfilename, short *assignment, int architecture, int ndims_tot,
948                        int mesh_dims[3], double *goal, int global_method, int local_method,
949                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
950 
951 extern int FREE_GRAPH;
952 #endif
953 
954 #undef __FUNCT__
955 #define __FUNCT__ "PetscPartitionerPartition_Chaco"
/* Partition the CSR graph (start, adjacency) into nparts pieces using Chaco's
   multilevel Kernighan-Lin scheme, filling partSection with the number of
   vertices per partition and returning the vertex numbers, grouped by
   partition, in the IS. The caller's adjacency array is temporarily shifted
   to 1-based numbering (Chaco's convention) and restored before returning. */
PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
  short int     *assignment;              /* Output partition */
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  if (!numVertices) {
    /* No local vertices: produce an empty (but valid) section and IS */
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco expects 1-based vertex numbering; shift in place and undo below */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  /* Lay the nparts processors out as a 1-D mesh */
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  /* NOTE(review): the return code of interface() is only checked below when
     PETSC_HAVE_UNISTD_H is defined — on other platforms a failure is silent */
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    /* Restore stdout and recover any Chaco output for the error message */
    char msgLog[10000];
    int  count;

    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* Group vertex numbers by their assigned partition */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Restore the caller's 0-based adjacency numbering */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1059 
1060 #undef __FUNCT__
1061 #define __FUNCT__ "PetscPartitionerInitialize_Chaco"
1062 PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1063 {
1064   PetscFunctionBegin;
1065   part->ops->view      = PetscPartitionerView_Chaco;
1066   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1067   part->ops->partition = PetscPartitionerPartition_Chaco;
1068   PetscFunctionReturn(0);
1069 }
1070 
1071 /*MC
1072   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1073 
1074   Level: intermediate
1075 
1076 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1077 M*/
1078 
1079 #undef __FUNCT__
1080 #define __FUNCT__ "PetscPartitionerCreate_Chaco"
1081 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1082 {
1083   PetscPartitioner_Chaco *p;
1084   PetscErrorCode          ierr;
1085 
1086   PetscFunctionBegin;
1087   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1088   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1089   part->data = p;
1090 
1091   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1092   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1093   PetscFunctionReturn(0);
1094 }
1095 
1096 #undef __FUNCT__
1097 #define __FUNCT__ "PetscPartitionerDestroy_ParMetis"
1098 PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1099 {
1100   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1101   PetscErrorCode             ierr;
1102 
1103   PetscFunctionBegin;
1104   ierr = PetscFree(p);CHKERRQ(ierr);
1105   PetscFunctionReturn(0);
1106 }
1107 
1108 #undef __FUNCT__
1109 #define __FUNCT__ "PetscPartitionerView_ParMetis_Ascii"
1110 PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1111 {
1112   PetscViewerFormat format;
1113   PetscErrorCode    ierr;
1114 
1115   PetscFunctionBegin;
1116   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1117   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
1118   PetscFunctionReturn(0);
1119 }
1120 
1121 #undef __FUNCT__
1122 #define __FUNCT__ "PetscPartitionerView_ParMetis"
1123 PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1124 {
1125   PetscBool      iascii;
1126   PetscErrorCode ierr;
1127 
1128   PetscFunctionBegin;
1129   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1130   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1131   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1132   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1133   PetscFunctionReturn(0);
1134 }
1135 
1136 #if defined(PETSC_HAVE_PARMETIS)
1137 #include <parmetis.h>
1138 #endif
1139 
1140 #undef __FUNCT__
1141 #define __FUNCT__ "PetscPartitionerPartition_ParMetis"
1142 PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1143 {
1144 #if defined(PETSC_HAVE_PARMETIS)
1145   MPI_Comm       comm;
1146   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1147   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1148   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1149   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1150   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1151   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1152   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1153   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1154   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1155   PetscReal     *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1156   PetscReal     *ubvec;                    /* The balance intolerance for vertex weights */
1157   PetscInt       options[5];               /* Options */
1158   /* Outputs */
1159   PetscInt       edgeCut;                  /* The number of edges cut by the partition */
1160   PetscInt      *assignment, *points;
1161   PetscMPIInt    rank, p, v, i;
1162   PetscErrorCode ierr;
1163 
1164   PetscFunctionBegin;
1165   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1166   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1167   options[0] = 0; /* Use all defaults */
1168   /* Calculate vertex distribution */
1169   ierr = PetscMalloc4(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment);CHKERRQ(ierr);
1170   vtxdist[0] = 0;
1171   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1172   for (p = 2; p <= nparts; ++p) {
1173     vtxdist[p] += vtxdist[p-1];
1174   }
1175   /* Calculate weights */
1176   for (p = 0; p < nparts; ++p) {
1177     tpwgts[p] = 1.0/nparts;
1178   }
1179   ubvec[0] = 1.05;
1180 
1181   if (nparts == 1) {
1182     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));
1183   } else {
1184     if (vtxdist[1] == vtxdist[nparts]) {
1185       if (!rank) {
1186         PetscStackPush("METIS_PartGraphKway");
1187         ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
1188         PetscStackPop;
1189         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1190       }
1191     } else {
1192       PetscStackPush("ParMETIS_V3_PartKway");
1193       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1194       PetscStackPop;
1195       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
1196     }
1197   }
1198   /* Convert to PetscSection+IS */
1199   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1200   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1201   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1202   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1203   for (p = 0, i = 0; p < nparts; ++p) {
1204     for (v = 0; v < nvtxs; ++v) {
1205       if (assignment[v] == p) points[i++] = v;
1206     }
1207   }
1208   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1209   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1210   ierr = PetscFree4(vtxdist,tpwgts,ubvec,assignment);CHKERRQ(ierr);
1211 #else
1212   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1213 #endif
1214   PetscFunctionReturn(0);
1215 }
1216 
1217 #undef __FUNCT__
1218 #define __FUNCT__ "PetscPartitionerInitialize_ParMetis"
1219 PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1220 {
1221   PetscFunctionBegin;
1222   part->ops->view      = PetscPartitionerView_ParMetis;
1223   part->ops->destroy   = PetscPartitionerDestroy_ParMetis;
1224   part->ops->partition = PetscPartitionerPartition_ParMetis;
1225   PetscFunctionReturn(0);
1226 }
1227 
1228 /*MC
1229   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1230 
1231   Level: intermediate
1232 
1233 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1234 M*/
1235 
1236 #undef __FUNCT__
1237 #define __FUNCT__ "PetscPartitionerCreate_ParMetis"
1238 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1239 {
1240   PetscPartitioner_ParMetis *p;
1241   PetscErrorCode          ierr;
1242 
1243   PetscFunctionBegin;
1244   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1245   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1246   part->data = p;
1247 
1248   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1249   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1250   PetscFunctionReturn(0);
1251 }
1252 
1253 #undef __FUNCT__
1254 #define __FUNCT__ "DMPlexGetPartitioner"
1255 /*@
1256   DMPlexGetPartitioner - Get the mesh partitioner
1257 
1258   Not collective
1259 
1260   Input Parameter:
1261 . dm - The DM
1262 
1263   Output Parameter:
1264 . part - The PetscPartitioner
1265 
1266   Level: developer
1267 
1268 .seealso DMPlexDistribute(), PetscPartitionerCreate()
1269 @*/
1270 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1271 {
1272   DM_Plex *mesh = (DM_Plex *) dm->data;
1273 
1274   PetscFunctionBegin;
1275   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1276   PetscValidPointer(part, 2);
1277   *part = mesh->partitioner;
1278   PetscFunctionReturn(0);
1279 }
1280 
1281 #undef __FUNCT__
1282 #define __FUNCT__ "DMPlexMarkTreeClosure"
/* Recursively add the transitive closure of a point's tree parent to the
   current partition: any closure point not already marked in bt is appended
   to segpart and counted in *partSize, then its own tree ancestry is walked
   the same way. Points whose parent is themselves (no tree parent) are a
   no-op. */
static PetscErrorCode DMPlexMarkTreeClosure(DM dm, PetscSegBuffer segpart, PetscBT bt, PetscInt point, PetscInt *partSize)
{
  PetscInt       parent, closureSize, *closure = NULL, i, pStart, pEnd;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
  /* A point that is its own parent has no tree ancestry to add */
  if (parent == point) PetscFunctionReturn(0);
  ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr);
  ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
  for (i = 0; i < closureSize; i++) {
    /* closure[] alternates (point, orientation); take the point entries */
    PetscInt cpoint = closure[2*i];

    /* PetscBTLookupSet returns the previous bit, so each point is added once */
    if (!PetscBTLookupSet(bt,cpoint-pStart)) {
      PetscInt *PETSC_RESTRICT pt;
      (*partSize)++;
      ierr = PetscSegBufferGetInts(segpart,1,&pt);CHKERRQ(ierr);
      *pt = cpoint;
    }
    /* Recurse: the closure point may itself have a tree parent */
    ierr = DMPlexMarkTreeClosure(dm,segpart,bt,cpoint,partSize);CHKERRQ(ierr);
  }
  ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1307 
1308 #undef __FUNCT__
1309 #define __FUNCT__ "DMPlexCreatePartitionClosure"
/* Expand a partition of points (pointSection/pointPartition, indexed by rank)
   into a partition of their transitive closures, including tree ancestors.
   The output section and IS are created here; the caller owns both. Each
   rank's point list is deduplicated via a PetscBT over the chart and sorted. */
PetscErrorCode DMPlexCreatePartitionClosure(DM dm, PetscSection pointSection, IS pointPartition, PetscSection *section, IS *partition)
{
  /* const PetscInt  height = 0; */
  const PetscInt *partArray;
  PetscInt       *allPoints, *packPoints;
  PetscInt        rStart, rEnd, rank, pStart, pEnd, newSize;
  PetscErrorCode  ierr;
  PetscBT         bt;
  PetscSegBuffer  segpack,segpart;

  PetscFunctionBegin;
  ierr = PetscSectionGetChart(pointSection, &rStart, &rEnd);CHKERRQ(ierr);
  ierr = ISGetIndices(pointPartition, &partArray);CHKERRQ(ierr);
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dm), section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(*section, rStart, rEnd);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr);
  /* bt marks points already collected for the current rank; segpart collects
     the current rank's points, segpack accumulates all ranks' points */
  ierr = PetscBTCreate(pEnd-pStart,&bt);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&segpack);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&segpart);CHKERRQ(ierr);
  for (rank = rStart; rank < rEnd; ++rank) {
    PetscInt partSize = 0, numPoints, offset, p, *PETSC_RESTRICT placePoints;

    ierr = PetscSectionGetDof(pointSection, rank, &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(pointSection, rank, &offset);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      PetscInt  point   = partArray[offset+p], closureSize, c;
      PetscInt *closure = NULL;

      /* TODO Include support for height > 0 case */
      ierr = DMPlexGetTransitiveClosure(dm, point, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      for (c=0; c<closureSize; c++) {
        /* closure[] alternates (point, orientation); take the point entries */
        PetscInt cpoint = closure[c*2];
        /* PetscBTLookupSet returns the previous bit: add each point once */
        if (!PetscBTLookupSet(bt,cpoint-pStart)) {
          PetscInt *PETSC_RESTRICT pt;
          partSize++;
          ierr = PetscSegBufferGetInts(segpart,1,&pt);CHKERRQ(ierr);
          *pt = cpoint;
        }
        /* Also pull in the closures of any tree ancestors of this point */
        ierr = DMPlexMarkTreeClosure(dm,segpart,bt,cpoint,&partSize);CHKERRQ(ierr);
      }
      ierr = DMPlexRestoreTransitiveClosure(dm, point, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
    }
    ierr = PetscSectionSetDof(*section, rank, partSize);CHKERRQ(ierr);
    /* Move this rank's points from segpart into segpack, sorted */
    ierr = PetscSegBufferGetInts(segpack,partSize,&placePoints);CHKERRQ(ierr);
    ierr = PetscSegBufferExtractTo(segpart,placePoints);CHKERRQ(ierr);
    ierr = PetscSortInt(partSize,placePoints);CHKERRQ(ierr);
    /* Reset the marks so the next rank starts with a clean bitmap */
    for (p=0; p<partSize; p++) {ierr = PetscBTClear(bt,placePoints[p]-pStart);CHKERRQ(ierr);}
  }
  ierr = PetscBTDestroy(&bt);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&segpart);CHKERRQ(ierr);

  ierr = PetscSectionSetUp(*section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(*section, &newSize);CHKERRQ(ierr);
  ierr = PetscMalloc1(newSize, &allPoints);CHKERRQ(ierr);

  /* Copy the packed per-rank point lists into the flat output array */
  ierr = PetscSegBufferExtractInPlace(segpack,&packPoints);CHKERRQ(ierr);
  for (rank = rStart; rank < rEnd; ++rank) {
    PetscInt numPoints, offset;

    ierr = PetscSectionGetDof(*section, rank, &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(*section, rank, &offset);CHKERRQ(ierr);
    ierr = PetscMemcpy(&allPoints[offset], packPoints, numPoints * sizeof(PetscInt));CHKERRQ(ierr);
    packPoints += numPoints;
  }

  ierr = PetscSegBufferDestroy(&segpack);CHKERRQ(ierr);
  ierr = ISRestoreIndices(pointPartition, &partArray);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), newSize, allPoints, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1380 
1381 #undef __FUNCT__
1382 #define __FUNCT__ "DMPlexPartitionLabelClosure"
1383 /*@
1384   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
1385 
1386   Input Parameters:
1387 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1389 
1390   Level: developer
1391 
1392 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1393 @*/
1394 PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
1395 {
1396   IS              rankIS,   pointIS;
1397   const PetscInt *ranks,   *points;
1398   PetscInt        numRanks, numPoints, r, p, c, closureSize;
1399   PetscInt       *closure = NULL;
1400   PetscErrorCode  ierr;
1401 
1402   PetscFunctionBegin;
1403   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1404   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1405   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1406   for (r = 0; r < numRanks; ++r) {
1407     const PetscInt rank = ranks[r];
1408 
1409     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1410     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1411     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1412     for (p = 0; p < numPoints; ++p) {
1413       ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
1414       for (c = 0; c < closureSize*2; c += 2) {ierr = DMLabelSetValue(label, closure[c], rank);CHKERRQ(ierr);}
1415     }
1416     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1417     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1418   }
1419   if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);}
1420   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1421   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1422   PetscFunctionReturn(0);
1423 }
1424 
1425 #undef __FUNCT__
1426 #define __FUNCT__ "DMPlexPartitionLabelAdjacency"
1427 /*@
1428   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
1429 
1430   Input Parameters:
1431 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1433 
1434   Level: developer
1435 
1436 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1437 @*/
1438 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
1439 {
1440   IS              rankIS,   pointIS;
1441   const PetscInt *ranks,   *points;
1442   PetscInt        numRanks, numPoints, r, p, a, adjSize;
1443   PetscInt       *adj = NULL;
1444   PetscErrorCode  ierr;
1445 
1446   PetscFunctionBegin;
1447   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1448   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1449   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1450   for (r = 0; r < numRanks; ++r) {
1451     const PetscInt rank = ranks[r];
1452 
1453     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1454     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1455     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1456     for (p = 0; p < numPoints; ++p) {
1457       adjSize = PETSC_DETERMINE;
1458       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
1459       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
1460     }
1461     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1462     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1463   }
1464   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1465   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1466   ierr = PetscFree(adj);CHKERRQ(ierr);
1467   PetscFunctionReturn(0);
1468 }
1469 
1470 #undef __FUNCT__
1471 #define __FUNCT__ "DMPlexPartitionLabelInvert"
1472 /*@
1473   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
1474 
1475   Input Parameters:
+ dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots
1482 
1483   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
1484   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
1485 
1486   Level: developer
1487 
1488 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1489 @*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, numProcs;
  PetscInt           p, n, numNeighbors, size, l, nleaves;
  PetscSF            sfPoint;
  PetscSFNode       *rootPoints, *leafPoints;
  PetscSection       rootSection, leafSection;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners */
  /* rootSection is chartered over ranks [0, numProcs); the dof of chart point r is
     the number of local points labeled with target rank r in rootLabel */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, numProcs);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(size, &rootPoints);CHKERRQ(ierr);
  /* Pack each labeled point as a (index, rank) pair identifying its actual owner:
     if the point is a leaf of the point SF, substitute the root (owner) copy */
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* Binary search for points[p] in the sorted leaf list of the point SF;
         local == NULL means the leaves are contiguous, so no lookup is possible here */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap */
  /* Ship the (index, rank) pairs to the neighbor ranks along processSF;
     MPIU_2INT is assumed to match the two-PetscInt layout of PetscSFNode — TODO confirm for 64-bit index builds */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Filter remote contributions (ovLeafPoints) into the overlapSF */
  /* Record each received remote root in leafLabel under its owner rank */
  ierr = PetscSectionGetStorageSize(leafSection, &size);CHKERRQ(ierr);
  for (p = 0; p < size; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1559 
1560 #undef __FUNCT__
1561 #define __FUNCT__ "DMPlexPartitionLabelCreateSF"
1562 /*@
1563   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
1564 
  Input Parameters:
+ dm    - The DM
- label - DMLabel assigning ranks to remote roots

  Output Parameter:
. sf    - The star forest communication context encapsulating the defined mapping
1571 
1572   Note: The incoming label is a receiver mapping of remote points to their parent rank.
1573 
1574   Level: developer
1575 
1576 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
1577 @*/
1578 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
1579 {
1580   PetscMPIInt     rank, numProcs;
1581   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
1582   PetscSFNode    *remotePoints;
1583   IS              remoteRootIS;
1584   const PetscInt *remoteRoots;
1585   PetscErrorCode ierr;
1586 
1587   PetscFunctionBegin;
1588   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1589   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
1590 
1591   for (numRemote = 0, n = 0; n < numProcs; ++n) {
1592     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1593     numRemote += numPoints;
1594   }
1595   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
1596   /* Put owned points first */
1597   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
1598   if (numPoints > 0) {
1599     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
1600     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1601     for (p = 0; p < numPoints; p++) {
1602       remotePoints[idx].index = remoteRoots[p];
1603       remotePoints[idx].rank = rank;
1604       idx++;
1605     }
1606     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1607     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1608   }
1609   /* Now add remote points */
1610   for (n = 0; n < numProcs; ++n) {
1611     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1612     if (numPoints <= 0 || n == rank) continue;
1613     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
1614     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1615     for (p = 0; p < numPoints; p++) {
1616       remotePoints[idx].index = remoteRoots[p];
1617       remotePoints[idx].rank = n;
1618       idx++;
1619     }
1620     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1621     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1622   }
1623   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
1624   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1625   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1626   PetscFunctionReturn(0);
1627 }
1628