xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 0134a67b057322d4851f6269ae24f60bfc3170ec)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/hashseti.h>
3 
4 PetscClassId PETSCPARTITIONER_CLASSID = 0;
5 
6 PetscFunctionList PetscPartitionerList              = NULL;
7 PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
8 
9 PetscBool ChacoPartitionercite = PETSC_FALSE;
10 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
11                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
12                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
13                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
14                                "  isbn      = {0-89791-816-9},\n"
15                                "  pages     = {28},\n"
16                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
17                                "  publisher = {ACM Press},\n"
18                                "  address   = {New York},\n"
19                                "  year      = {1995}\n}\n";
20 
21 PetscBool ParMetisPartitionercite = PETSC_FALSE;
22 const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
23                                "  author  = {George Karypis and Vipin Kumar},\n"
24                                "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
25                                "  journal = {Journal of Parallel and Distributed Computing},\n"
26                                "  volume  = {48},\n"
27                                "  pages   = {71--85},\n"
28                                "  year    = {1998}\n}\n";
29 
30 PETSC_STATIC_INLINE PetscInt DMPlex_GlobalID(PetscInt point) { return point >= 0 ? point : -(point+1); }
31 
32 /*@C
33   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
34 
35   Input Parameters:
+ dm      - The mesh DM
37 - height  - Height of the strata from which to construct the graph
38 
  Output Parameters:
40 + numVertices     - Number of vertices in the graph
41 . offsets         - Point offsets in the graph
42 . adjacency       - Point connectivity in the graph
43 - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.
44 
45   The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
46   representation on the mesh.
47 
48   Level: developer
49 
50 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMSetAdjacency()
51 @*/
PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
{
  PetscInt       dim, depth, p, pStart, pEnd, a, adjSize, idx, size;
  PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
  IS             cellNumbering;
  const PetscInt *cellNum;
  PetscBool      useCone, useClosure;
  PetscSection   section;       /* counts adjacency entries per owned cell; gives CSR offsets after setup */
  PetscSegBuffer adjBuffer;     /* grow-only buffer accumulating the adjacency lists in order */
  PetscSF        sfPoint;
  PetscInt       *adjCells = NULL, *remoteCells = NULL;
  const PetscInt *local;
  PetscInt       nroots, nleaves, l;
  PetscMPIInt    rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  if (dim != depth) {
    /* We do not handle the uninterpolated case here */
    ierr = DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);CHKERRQ(ierr);
    /* DMPlexCreateNeighborCSR does not make a numbering */
    if (globalNumbering) {ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);CHKERRQ(ierr);}
    /* Different behavior for empty graphs */
    if (!*numVertices) {
      ierr = PetscMalloc1(1, offsets);CHKERRQ(ierr);
      (*offsets)[0] = 0;
    }
    /* Broken in parallel */
    if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported");
    PetscFunctionReturn(0);
  }
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
  /* Build adjacency graph via a section/segbuffer */
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
  /* Always use FVM adjacency to create partitioner graph; the user's adjacency
     settings are saved and restored once the graph has been assembled */
  ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
  ierr = DMSetBasicAdjacency(dm, PETSC_TRUE, PETSC_FALSE);CHKERRQ(ierr);
  ierr = DMPlexCreateNumbering_Internal(dm, pStart, pEnd, 0, NULL, sfPoint, &cellNumbering);CHKERRQ(ierr);
  if (globalNumbering) {
    /* Caller receives a reference to the same numbering we use internally */
    ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
    *globalNumbering = cellNumbering;
  }
  ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  /* For all boundary faces (including faces adjacent to a ghost cell), record the local cell in adjCells
     Broadcast adjCells to remoteCells (to get cells from roots) and Reduce adjCells to remoteCells (to get cells from leaves)
   */
  ierr = PetscSFGetGraph(sfPoint, &nroots, &nleaves, &local, NULL);CHKERRQ(ierr);
  if (nroots >= 0) {
    PetscInt fStart, fEnd, f;

    /* adjCells[f] = global ID of the cell adjacent to face f from this rank's side; -3 = no cell recorded */
    ierr = PetscCalloc2(nroots, &adjCells, nroots, &remoteCells);CHKERRQ(ierr);
    ierr = DMPlexGetHeightStratum(dm, height+1, &fStart, &fEnd);CHKERRQ(ierr);
    for (l = 0; l < nroots; ++l) adjCells[l] = -3;
    for (f = fStart; f < fEnd; ++f) {
      const PetscInt *support;
      PetscInt        supportSize;

      ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
      ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
      if (supportSize == 1) adjCells[f] = DMPlex_GlobalID(cellNum[support[0]]);
      else if (supportSize == 2) {
        /* An interior face with one cell shared (a leaf in the SF): record the other cell */
        ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = DMPlex_GlobalID(cellNum[support[1]]);
        ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = DMPlex_GlobalID(cellNum[support[0]]);
      }
      /* Handle non-conforming meshes */
      if (supportSize > 2) {
        PetscInt        numChildren, i;
        const PetscInt *children;

        /* Recurse into the tree children of the face and apply the same rules */
        ierr = DMPlexGetTreeChildren(dm, f, &numChildren, &children);CHKERRQ(ierr);
        for (i = 0; i < numChildren; ++i) {
          const PetscInt child = children[i];
          if (fStart <= child && child < fEnd) {
            ierr = DMPlexGetSupport(dm, child, &support);CHKERRQ(ierr);
            ierr = DMPlexGetSupportSize(dm, child, &supportSize);CHKERRQ(ierr);
            if (supportSize == 1) adjCells[child] = DMPlex_GlobalID(cellNum[support[0]]);
            else if (supportSize == 2) {
              ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr);
              if (p >= 0) adjCells[child] = DMPlex_GlobalID(cellNum[support[1]]);
              ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr);
              if (p >= 0) adjCells[child] = DMPlex_GlobalID(cellNum[support[0]]);
            }
          }
        }
      }
    }
    /* Exchange: broadcast root values to leaves, then MPI_MAX-reduce leaf values back,
       so remoteCells[f] ends up holding the neighboring cell from the other rank */
    for (l = 0; l < nroots; ++l) remoteCells[l] = -1;
    ierr = PetscSFBcastBegin(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
  }
  /* Combine local and global adjacencies */
  for (*numVertices = 0, p = pStart; p < pEnd; p++) {
    /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    /* Add remote cells */
    if (remoteCells) {
      const PetscInt gp = DMPlex_GlobalID(cellNum[p]);
      PetscInt       coneSize, numChildren, c, i;
      const PetscInt *cone, *children;

      ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
      ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
      for (c = 0; c < coneSize; ++c) {
        const PetscInt point = cone[c];
        if (remoteCells[point] >= 0 && remoteCells[point] != gp) {
          PetscInt *PETSC_RESTRICT pBuf;
          ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
          ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
          *pBuf = remoteCells[point];
        }
        /* Handle non-conforming meshes */
        ierr = DMPlexGetTreeChildren(dm, point, &numChildren, &children);CHKERRQ(ierr);
        for (i = 0; i < numChildren; ++i) {
          const PetscInt child = children[i];
          if (remoteCells[child] >= 0 && remoteCells[child] != gp) {
            PetscInt *PETSC_RESTRICT pBuf;
            ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
            ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
            *pBuf = remoteCells[child];
          }
        }
      }
    }
    /* Add local cells */
    adjSize = PETSC_DETERMINE;
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      const PetscInt point = adj[a];
      /* Keep only other cells in this stratum (exclude self and non-cell points) */
      if (point != p && pStart <= point && point < pEnd) {
        PetscInt *PETSC_RESTRICT pBuf;
        ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
        ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
        *pBuf = DMPlex_GlobalID(cellNum[point]);
      }
    }
    (*numVertices)++;
  }
  ierr = PetscFree2(adjCells, remoteCells);CHKERRQ(ierr);
  /* Restore the user's adjacency settings (temporarily forced to FVM above) */
  ierr = DMSetBasicAdjacency(dm, useCone, useClosure);CHKERRQ(ierr);
  /* Derive CSR graph from section/segbuffer */
  ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
  /* Compress section offsets over owned cells only, matching the vertex count above */
  for (idx = 0, p = pStart; p < pEnd; p++) {
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
  }
  vOffsets[*numVertices] = size;
  if (offsets) *offsets = vOffsets;
  ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
  ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
  if (adjacency) *adjacency = graph;
  /* Clean up */
  ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
  ierr = PetscFree(adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
221 
222 /*@C
223   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
224 
225   Collective
226 
227   Input Arguments:
228 + dm - The DMPlex
229 - cellHeight - The height of mesh points to treat as cells (default should be 0)
230 
231   Output Arguments:
232 + numVertices - The number of local vertices in the graph, or cells in the mesh.
233 . offsets     - The offset to the adjacency list for each cell
234 - adjacency   - The adjacency list for all cells
235 
236   Note: This is suitable for input to a mesh partitioner like ParMetis.
237 
238   Level: advanced
239 
240 .seealso: DMPlexCreate()
241 @*/
242 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
243 {
244   const PetscInt maxFaceCases = 30;
245   PetscInt       numFaceCases = 0;
246   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
247   PetscInt      *off, *adj;
248   PetscInt      *neighborCells = NULL;
249   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
250   PetscErrorCode ierr;
251 
252   PetscFunctionBegin;
253   /* For parallel partitioning, I think you have to communicate supports */
254   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
255   cellDim = dim - cellHeight;
256   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
257   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
258   if (cEnd - cStart == 0) {
259     if (numVertices) *numVertices = 0;
260     if (offsets)   *offsets   = NULL;
261     if (adjacency) *adjacency = NULL;
262     PetscFunctionReturn(0);
263   }
264   numCells  = cEnd - cStart;
265   faceDepth = depth - cellHeight;
266   if (dim == depth) {
267     PetscInt f, fStart, fEnd;
268 
269     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
270     /* Count neighboring cells */
271     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
272     for (f = fStart; f < fEnd; ++f) {
273       const PetscInt *support;
274       PetscInt        supportSize;
275       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
276       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
277       if (supportSize == 2) {
278         PetscInt numChildren;
279 
280         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
281         if (!numChildren) {
282           ++off[support[0]-cStart+1];
283           ++off[support[1]-cStart+1];
284         }
285       }
286     }
287     /* Prefix sum */
288     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
289     if (adjacency) {
290       PetscInt *tmp;
291 
292       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
293       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
294       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
295       /* Get neighboring cells */
296       for (f = fStart; f < fEnd; ++f) {
297         const PetscInt *support;
298         PetscInt        supportSize;
299         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
300         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
301         if (supportSize == 2) {
302           PetscInt numChildren;
303 
304           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
305           if (!numChildren) {
306             adj[tmp[support[0]-cStart]++] = support[1];
307             adj[tmp[support[1]-cStart]++] = support[0];
308           }
309         }
310       }
311 #if defined(PETSC_USE_DEBUG)
312       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
313 #endif
314       ierr = PetscFree(tmp);CHKERRQ(ierr);
315     }
316     if (numVertices) *numVertices = numCells;
317     if (offsets)   *offsets   = off;
318     if (adjacency) *adjacency = adj;
319     PetscFunctionReturn(0);
320   }
321   /* Setup face recognition */
322   if (faceDepth == 1) {
323     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
324 
325     for (c = cStart; c < cEnd; ++c) {
326       PetscInt corners;
327 
328       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
329       if (!cornersSeen[corners]) {
330         PetscInt nFV;
331 
332         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
333         cornersSeen[corners] = 1;
334 
335         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
336 
337         numFaceVertices[numFaceCases++] = nFV;
338       }
339     }
340   }
341   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
342   /* Count neighboring cells */
343   for (cell = cStart; cell < cEnd; ++cell) {
344     PetscInt numNeighbors = PETSC_DETERMINE, n;
345 
346     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
347     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
348     for (n = 0; n < numNeighbors; ++n) {
349       PetscInt        cellPair[2];
350       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
351       PetscInt        meetSize = 0;
352       const PetscInt *meet    = NULL;
353 
354       cellPair[0] = cell; cellPair[1] = neighborCells[n];
355       if (cellPair[0] == cellPair[1]) continue;
356       if (!found) {
357         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
358         if (meetSize) {
359           PetscInt f;
360 
361           for (f = 0; f < numFaceCases; ++f) {
362             if (numFaceVertices[f] == meetSize) {
363               found = PETSC_TRUE;
364               break;
365             }
366           }
367         }
368         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
369       }
370       if (found) ++off[cell-cStart+1];
371     }
372   }
373   /* Prefix sum */
374   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
375 
376   if (adjacency) {
377     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
378     /* Get neighboring cells */
379     for (cell = cStart; cell < cEnd; ++cell) {
380       PetscInt numNeighbors = PETSC_DETERMINE, n;
381       PetscInt cellOffset   = 0;
382 
383       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
384       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
385       for (n = 0; n < numNeighbors; ++n) {
386         PetscInt        cellPair[2];
387         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
388         PetscInt        meetSize = 0;
389         const PetscInt *meet    = NULL;
390 
391         cellPair[0] = cell; cellPair[1] = neighborCells[n];
392         if (cellPair[0] == cellPair[1]) continue;
393         if (!found) {
394           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
395           if (meetSize) {
396             PetscInt f;
397 
398             for (f = 0; f < numFaceCases; ++f) {
399               if (numFaceVertices[f] == meetSize) {
400                 found = PETSC_TRUE;
401                 break;
402               }
403             }
404           }
405           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
406         }
407         if (found) {
408           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
409           ++cellOffset;
410         }
411       }
412     }
413   }
414   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
415   if (numVertices) *numVertices = numCells;
416   if (offsets)   *offsets   = off;
417   if (adjacency) *adjacency = adj;
418   PetscFunctionReturn(0);
419 }
420 
421 /*@C
422   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
423 
424   Not Collective
425 
426   Input Parameters:
427 + name        - The name of a new user-defined creation routine
428 - create_func - The creation routine itself
429 
430   Notes:
431   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
432 
433   Sample usage:
434 .vb
435     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
436 .ve
437 
438   Then, your PetscPartitioner type can be chosen with the procedural interface via
439 .vb
440     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
441     PetscPartitionerSetType(PetscPartitioner, "my_part");
442 .ve
443    or at runtime via the option
444 .vb
445     -petscpartitioner_type my_part
446 .ve
447 
448   Level: advanced
449 
450 .keywords: PetscPartitioner, register
451 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
452 
453 @*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Record the creation routine under the given name; PetscPartitionerSetType() looks it up here */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
462 
463 /*@C
464   PetscPartitionerSetType - Builds a particular PetscPartitioner
465 
466   Collective on PetscPartitioner
467 
468   Input Parameters:
469 + part - The PetscPartitioner object
470 - name - The kind of partitioner
471 
472   Options Database Key:
473 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
474 
475   Level: intermediate
476 
477 .keywords: PetscPartitioner, set, type
478 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
479 @*/
PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
{
  PetscErrorCode (*r)(PetscPartitioner);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* No-op if the object already has the requested type */
  ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  /* Ensure the built-in implementations are registered before the lookup */
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);

  /* Tear down the previous implementation's data, then clear the ops table so the
     constructor below starts from a clean slate */
  if (part->ops->destroy) {
    ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
  }
  ierr = PetscMemzero(part->ops, sizeof(struct _PetscPartitionerOps));CHKERRQ(ierr);
  ierr = (*r)(part);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
503 
504 /*@C
505   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
506 
507   Not Collective
508 
509   Input Parameter:
510 . part - The PetscPartitioner
511 
512   Output Parameter:
513 . name - The PetscPartitioner type name
514 
515   Level: intermediate
516 
517 .keywords: PetscPartitioner, get, type, name
518 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
519 @*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  /* Registration is triggered here so that the returned name is always a registered type */
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  /* Returns a borrowed pointer; the caller must not free it */
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
531 
532 /*@C
533   PetscPartitionerView - Views a PetscPartitioner
534 
535   Collective on PetscPartitioner
536 
  Input Parameters:
538 + part - the PetscPartitioner object to view
539 - v    - the viewer
540 
541   Level: developer
542 
543 .seealso: PetscPartitionerDestroy()
544 @*/
545 PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
546 {
547   PetscMPIInt    size;
548   PetscBool      isascii;
549   PetscErrorCode ierr;
550 
551   PetscFunctionBegin;
552   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
553   if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
554   ierr = PetscObjectTypeCompare((PetscObject) v, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
555   if (isascii) {
556     ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
557     ierr = PetscViewerASCIIPrintf(v, "Graph Partitioner: %d MPI Process%s\n", size, size > 1 ? "es" : "");CHKERRQ(ierr);
558     ierr = PetscViewerASCIIPrintf(v, "  type: %s\n", part->hdr.type_name);CHKERRQ(ierr);
559     ierr = PetscViewerASCIIPushTab(v);CHKERRQ(ierr);
560     ierr = PetscViewerASCIIPrintf(v, "edge cut: %D\n", part->edgeCut);CHKERRQ(ierr);
561     ierr = PetscViewerASCIIPrintf(v, "balance:  %.2g\n", part->balance);CHKERRQ(ierr);
562     ierr = PetscViewerASCIIPopTab(v);CHKERRQ(ierr);
563   }
564   if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
565   PetscFunctionReturn(0);
566 }
567 
568 static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType)
569 {
570   PetscFunctionBegin;
571   if (!currentType) {
572 #if defined(PETSC_HAVE_CHACO)
573     *defaultType = PETSCPARTITIONERCHACO;
574 #elif defined(PETSC_HAVE_PARMETIS)
575     *defaultType = PETSCPARTITIONERPARMETIS;
576 #elif defined(PETSC_HAVE_PTSCOTCH)
577     *defaultType = PETSCPARTITIONERPTSCOTCH;
578 #else
579     *defaultType = PETSCPARTITIONERSIMPLE;
580 #endif
581   } else {
582     *defaultType = currentType;
583   }
584   PetscFunctionReturn(0);
585 }
586 
587 /*@
588   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
589 
590   Collective on PetscPartitioner
591 
592   Input Parameter:
593 . part - the PetscPartitioner object to set options for
594 
595   Level: developer
596 
597 .seealso: PetscPartitionerView()
598 @*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  const char    *defaultType;
  char           name[256];
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  ierr = PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);CHKERRQ(ierr);
  /* PetscObjectOptionsBegin declares the hidden PetscOptionsObject used below and by PetscOptionsEnd */
  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, sizeof(name), &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
  } else if (!((PetscObject) part)->type_name) {
    /* No option given and no type set yet: fall back to the default type */
    ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
  }
  /* Let the concrete implementation process its own options */
  if (part->ops->setfromoptions) {
    ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);
  }
  /* Replace any previously configured graph viewer with the one from the options database */
  ierr = PetscViewerDestroy(&part->viewerGraph);CHKERRQ(ierr);
  ierr = PetscOptionsGetViewer(((PetscObject) part)->comm, ((PetscObject) part)->options, ((PetscObject) part)->prefix, "-petscpartitioner_view_graph", &part->viewerGraph, &part->formatGraph, &part->viewGraph);CHKERRQ(ierr);
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
627 
628 /*@C
629   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
630 
631   Collective on PetscPartitioner
632 
633   Input Parameter:
634 . part - the PetscPartitioner object to setup
635 
636   Level: developer
637 
638 .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
639 @*/
PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Setup is entirely type-specific; no-op if the implementation provides no hook */
  if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
649 
650 /*@
651   PetscPartitionerDestroy - Destroys a PetscPartitioner object
652 
653   Collective on PetscPartitioner
654 
655   Input Parameter:
656 . part - the PetscPartitioner object to destroy
657 
658   Level: developer
659 
660 .seealso: PetscPartitionerView()
661 @*/
PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*part) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);

  /* Reference counting: only the last reference actually frees the object */
  if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
  ((PetscObject) (*part))->refct = 0;

  ierr = PetscViewerDestroy(&(*part)->viewerGraph);CHKERRQ(ierr);
  /* Type-specific teardown before the generic header is destroyed */
  if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
  ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
678 
679 /*@
680   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
681 
682   Collective on MPI_Comm
683 
684   Input Parameter:
685 . comm - The communicator for the PetscPartitioner object
686 
687   Output Parameter:
688 . part - The PetscPartitioner object
689 
690   Level: beginner
691 
692 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
693 @*/
PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
{
  PetscPartitioner p;
  const char       *partitionerType = NULL;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidPointer(part, 2);
  *part = NULL;
  /* Ensure the DM package (and PETSCPARTITIONER_CLASSID) is initialized */
  ierr = DMInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
  /* Immediately give the object the build-dependent default type */
  ierr = PetscPartitionerGetDefaultType(NULL,&partitionerType);CHKERRQ(ierr);
  ierr = PetscPartitionerSetType(p,partitionerType);CHKERRQ(ierr);

  p->edgeCut = 0;
  p->balance = 0.0;

  *part = p;
  PetscFunctionReturn(0);
}
715 
716 /*@
717   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
718 
719   Collective on DM
720 
721   Input Parameters:
722 + part    - The PetscPartitioner
723 - dm      - The mesh DM
724 
725   Output Parameters:
726 + partSection     - The PetscSection giving the division of points by partition
727 - partition       - The list of points by partition
728 
729   Options Database:
730 . -petscpartitioner_view_graph - View the graph we are partitioning
731 
732   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
733 
734   Level: developer
735 
736 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
737 @*/
738 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
739 {
740   PetscMPIInt    size;
741   PetscErrorCode ierr;
742 
743   PetscFunctionBegin;
744   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
745   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
746   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
747   PetscValidPointer(partition, 5);
748   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
749   if (size == 1) {
750     PetscInt *points;
751     PetscInt  cStart, cEnd, c;
752 
753     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
754     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
755     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
756     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
757     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
758     for (c = cStart; c < cEnd; ++c) points[c] = c;
759     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
760     PetscFunctionReturn(0);
761   }
762   if (part->height == 0) {
763     PetscInt numVertices;
764     PetscInt *start     = NULL;
765     PetscInt *adjacency = NULL;
766     IS       globalNumbering;
767 
768     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
769     if (part->viewGraph) {
770       PetscViewer viewer = part->viewerGraph;
771       PetscBool   isascii;
772       PetscInt    v, i;
773       PetscMPIInt rank;
774 
775       ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) viewer), &rank);CHKERRQ(ierr);
776       ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
777       if (isascii) {
778         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
779         ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]Nv: %D\n", rank, numVertices);CHKERRQ(ierr);
780         for (v = 0; v < numVertices; ++v) {
781           const PetscInt s = start[v];
782           const PetscInt e = start[v+1];
783 
784           ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%d]  ", rank);CHKERRQ(ierr);
785           for (i = s; i < e; ++i) {ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%D ", adjacency[i]);CHKERRQ(ierr);}
786           ierr = PetscViewerASCIISynchronizedPrintf(viewer, "[%D-%D)\n", s, e);CHKERRQ(ierr);
787         }
788         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
789         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
790       }
791     }
792     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
793     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
794     ierr = PetscFree(start);CHKERRQ(ierr);
795     ierr = PetscFree(adjacency);CHKERRQ(ierr);
796     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
797       const PetscInt *globalNum;
798       const PetscInt *partIdx;
799       PetscInt *map, cStart, cEnd;
800       PetscInt *adjusted, i, localSize, offset;
801       IS    newPartition;
802 
803       ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
804       ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
805       ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
806       ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
807       ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
808       ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
809       for (i = cStart, offset = 0; i < cEnd; i++) {
810         if (globalNum[i - cStart] >= 0) map[offset++] = i;
811       }
812       for (i = 0; i < localSize; i++) {
813         adjusted[i] = map[partIdx[i]];
814       }
815       ierr = PetscFree(map);CHKERRQ(ierr);
816       ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
817       ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
818       ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
819       ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
820       ierr = ISDestroy(partition);CHKERRQ(ierr);
821       *partition = newPartition;
822     }
823   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
824   ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
825   PetscFunctionReturn(0);
826 }
827 
828 static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
829 {
830   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
831   PetscErrorCode          ierr;
832 
833   PetscFunctionBegin;
834   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
835   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
836   ierr = PetscFree(p);CHKERRQ(ierr);
837   PetscFunctionReturn(0);
838 }
839 
840 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
841 {
842   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
843   PetscErrorCode          ierr;
844 
845   PetscFunctionBegin;
846   if (p->random) {
847     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
848     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
849     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
850   }
851   PetscFunctionReturn(0);
852 }
853 
854 static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
855 {
856   PetscBool      iascii;
857   PetscErrorCode ierr;
858 
859   PetscFunctionBegin;
860   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
861   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
862   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
863   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
864   PetscFunctionReturn(0);
865 }
866 
867 static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
868 {
869   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
870   PetscErrorCode          ierr;
871 
872   PetscFunctionBegin;
873   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner Shell Options");CHKERRQ(ierr);
874   ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
875   ierr = PetscOptionsTail();CHKERRQ(ierr);
876   PetscFunctionReturn(0);
877 }
878 
879 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
880 {
881   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
882   PetscInt                np;
883   PetscErrorCode          ierr;
884 
885   PetscFunctionBegin;
886   if (p->random) {
887     PetscRandom r;
888     PetscInt   *sizes, *points, v, p;
889     PetscMPIInt rank;
890 
891     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
892     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
893     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr);
894     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
895     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
896     for (v = 0; v < numVertices; ++v) {points[v] = v;}
897     for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);}
898     for (v = numVertices-1; v > 0; --v) {
899       PetscReal val;
900       PetscInt  w, tmp;
901 
902       ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));CHKERRQ(ierr);
903       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
904       w    = PetscFloorReal(val);
905       tmp       = points[v];
906       points[v] = points[w];
907       points[w] = tmp;
908     }
909     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
910     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
911     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
912   }
913   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
914   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
915   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
916   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
917   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
918   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
919   *partition = p->partition;
920   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
921   PetscFunctionReturn(0);
922 }
923 
924 static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
925 {
926   PetscFunctionBegin;
927   part->ops->view           = PetscPartitionerView_Shell;
928   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
929   part->ops->destroy        = PetscPartitionerDestroy_Shell;
930   part->ops->partition      = PetscPartitionerPartition_Shell;
931   PetscFunctionReturn(0);
932 }
933 
934 /*MC
935   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
936 
937   Level: intermediate
938 
939 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
940 M*/
941 
942 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
943 {
944   PetscPartitioner_Shell *p;
945   PetscErrorCode          ierr;
946 
947   PetscFunctionBegin;
948   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
949   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
950   part->data = p;
951 
952   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
953   p->random = PETSC_FALSE;
954   PetscFunctionReturn(0);
955 }
956 
957 /*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
959 
960   Collective on Part
961 
962   Input Parameters:
963 + part     - The PetscPartitioner
964 . size - The number of partitions
965 . sizes    - array of size size (or NULL) providing the number of points in each partition
966 - points   - array of size sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)
967 
968   Level: developer
969 
970   Notes:
971 
972     It is safe to free the sizes and points arrays after use in this routine.
973 
974 .seealso DMPlexDistribute(), PetscPartitionerCreate()
975 @*/
976 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
977 {
978   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
979   PetscInt                proc, numPoints;
980   PetscErrorCode          ierr;
981 
982   PetscFunctionBegin;
983   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
984   if (sizes)  {PetscValidPointer(sizes, 3);}
985   if (points) {PetscValidPointer(points, 4);}
986   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
987   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
988   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
989   ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
990   if (sizes) {
991     for (proc = 0; proc < size; ++proc) {
992       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
993     }
994   }
995   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
996   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
997   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
998   PetscFunctionReturn(0);
999 }
1000 
1001 /*@
1002   PetscPartitionerShellSetRandom - Set the flag to use a random partition
1003 
1004   Collective on Part
1005 
1006   Input Parameters:
1007 + part   - The PetscPartitioner
1008 - random - The flag to use a random partition
1009 
1010   Level: intermediate
1011 
1012 .seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
1013 @*/
1014 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
1015 {
1016   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1017 
1018   PetscFunctionBegin;
1019   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1020   p->random = random;
1021   PetscFunctionReturn(0);
1022 }
1023 
1024 /*@
1025   PetscPartitionerShellGetRandom - get the flag to use a random partition
1026 
1027   Collective on Part
1028 
1029   Input Parameter:
1030 . part   - The PetscPartitioner
1031 
  Output Parameter:
1033 . random - The flag to use a random partition
1034 
1035   Level: intermediate
1036 
1037 .seealso PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
1038 @*/
1039 PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
1040 {
1041   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1042 
1043   PetscFunctionBegin;
1044   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1045   PetscValidPointer(random, 2);
1046   *random = p->random;
1047   PetscFunctionReturn(0);
1048 }
1049 
1050 static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
1051 {
1052   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
1053   PetscErrorCode          ierr;
1054 
1055   PetscFunctionBegin;
1056   ierr = PetscFree(p);CHKERRQ(ierr);
1057   PetscFunctionReturn(0);
1058 }
1059 
/* No type-specific state to display for the simple partitioner */
static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1065 
1066 static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
1067 {
1068   PetscBool      iascii;
1069   PetscErrorCode ierr;
1070 
1071   PetscFunctionBegin;
1072   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1073   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1074   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1075   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
1076   PetscFunctionReturn(0);
1077 }
1078 
/* Simple partitioner: slice the global vertex range into nparts contiguous, near-equal
   chunks, ignoring the adjacency graph entirely. The partition IS is the identity on
   local vertices; partSection alone records how many consecutive vertices each part gets. */
static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)part);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial: part np gets floor(n/nparts) vertices, the first n%nparts parts one extra */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  } else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Prefix-sum of the per-rank vertex counts gives each rank its global index range */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    /* This rank owns global vertices [myFirst, myLast] (inclusive) */
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      /* Find the target parts overlapping [myFirst, myLast]. The first 'rem' parts
         hold pBig vertices each; the remaining parts hold pSmall. */
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      PetscInt rem = nvGlobal % nparts;
      PetscInt pSmall = nvGlobal/nparts;
      PetscInt pBig = nvGlobal/nparts + 1;


      if (rem) {
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        } else {
          /* myFirst lies beyond the large parts: offset into the small-part region */
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        } else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      } else {
        /* Even division: every part holds exactly nvGlobal/nparts vertices */
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      /* For each overlapping part, record how many of this rank's vertices it receives */
      for (np = firstPart; np <= lastPart; np++) {
        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1148 
1149 static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1150 {
1151   PetscFunctionBegin;
1152   part->ops->view      = PetscPartitionerView_Simple;
1153   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1154   part->ops->partition = PetscPartitionerPartition_Simple;
1155   PetscFunctionReturn(0);
1156 }
1157 
1158 /*MC
1159   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1160 
1161   Level: intermediate
1162 
1163 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1164 M*/
1165 
1166 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1167 {
1168   PetscPartitioner_Simple *p;
1169   PetscErrorCode           ierr;
1170 
1171   PetscFunctionBegin;
1172   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1173   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1174   part->data = p;
1175 
1176   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1177   PetscFunctionReturn(0);
1178 }
1179 
1180 static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1181 {
1182   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1183   PetscErrorCode          ierr;
1184 
1185   PetscFunctionBegin;
1186   ierr = PetscFree(p);CHKERRQ(ierr);
1187   PetscFunctionReturn(0);
1188 }
1189 
/* No type-specific state to display for the gather partitioner */
static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1195 
1196 static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1197 {
1198   PetscBool      iascii;
1199   PetscErrorCode ierr;
1200 
1201   PetscFunctionBegin;
1202   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1203   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1204   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1205   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1206   PetscFunctionReturn(0);
1207 }
1208 
1209 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1210 {
1211   PetscInt       np;
1212   PetscErrorCode ierr;
1213 
1214   PetscFunctionBegin;
1215   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1216   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1217   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1218   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1219   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1220   PetscFunctionReturn(0);
1221 }
1222 
1223 static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1224 {
1225   PetscFunctionBegin;
1226   part->ops->view      = PetscPartitionerView_Gather;
1227   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1228   part->ops->partition = PetscPartitionerPartition_Gather;
1229   PetscFunctionReturn(0);
1230 }
1231 
1232 /*MC
1233   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1234 
1235   Level: intermediate
1236 
1237 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1238 M*/
1239 
1240 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1241 {
1242   PetscPartitioner_Gather *p;
1243   PetscErrorCode           ierr;
1244 
1245   PetscFunctionBegin;
1246   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1247   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1248   part->data = p;
1249 
1250   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1251   PetscFunctionReturn(0);
1252 }
1253 
1254 
1255 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1256 {
1257   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1258   PetscErrorCode          ierr;
1259 
1260   PetscFunctionBegin;
1261   ierr = PetscFree(p);CHKERRQ(ierr);
1262   PetscFunctionReturn(0);
1263 }
1264 
/* No type-specific state to display for the Chaco partitioner */
static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1270 
1271 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1272 {
1273   PetscBool      iascii;
1274   PetscErrorCode ierr;
1275 
1276   PetscFunctionBegin;
1277   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1278   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1279   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1280   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1281   PetscFunctionReturn(0);
1282 }
1283 
1284 #if defined(PETSC_HAVE_CHACO)
1285 #if defined(PETSC_HAVE_UNISTD_H)
1286 #include <unistd.h>
1287 #endif
1288 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1289 #include <chaco.h>
1290 #else
1291 /* Older versions of Chaco do not have an include file */
1292 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1293                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1294                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1295                        int mesh_dims[3], double *goal, int global_method, int local_method,
1296                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1297 #endif
1298 extern int FREE_GRAPH;
1299 #endif
1300 
/* Partition the (serial) graph with the Chaco library. Chaco can only partition a graph
   held entirely on one rank; ranks with no vertices produce empty output. Chaco prints to
   stdout, so stdout is temporarily redirected into a pipe and replayed only on error. */
static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int           *assignment;              /* Output partition */
#else
  short int     *assignment;              /* Output partition */
#endif
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
#if defined (PETSC_USE_DEBUG)
  {
    /* Sanity check (debug only): more than one rank holding vertices means the graph is distributed */
    int ival,isum;
    PetscBool distributed;

    ival = (numVertices > 0);
    ierr = MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);CHKERRQ(ierr);
    distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
    if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
  }
#endif
  if (!numVertices) {
    /* Nothing to partition on this rank: emit an empty section and partition */
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco expects 1-based vertex numbers: shift the caller's adjacency in place (undone at the end) */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    /* Restore stdout and capture whatever Chaco printed, to include in the error message below */
    char msgLog[10000];
    int  count;

    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#else
  if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  /* Group vertices by assigned part: part 0's points first, then part 1's, ... */
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift applied to the caller's adjacency array above */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1421 
1422 static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1423 {
1424   PetscFunctionBegin;
1425   part->ops->view      = PetscPartitionerView_Chaco;
1426   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1427   part->ops->partition = PetscPartitionerPartition_Chaco;
1428   PetscFunctionReturn(0);
1429 }
1430 
1431 /*MC
1432   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1433 
1434   Level: intermediate
1435 
1436 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1437 M*/
1438 
1439 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1440 {
1441   PetscPartitioner_Chaco *p;
1442   PetscErrorCode          ierr;
1443 
1444   PetscFunctionBegin;
1445   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1446   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1447   part->data = p;
1448 
1449   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1450   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1451   PetscFunctionReturn(0);
1452 }
1453 
/* Names of the ParMetis partitioning strategies, indexed by PetscPartitioner_ParMetis.ptype */
static const char *ptypes[] = {"kway", "rb"};
1455 
1456 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1457 {
1458   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1459   PetscErrorCode             ierr;
1460 
1461   PetscFunctionBegin;
1462   ierr = PetscFree(p);CHKERRQ(ierr);
1463   PetscFunctionReturn(0);
1464 }
1465 
1466 static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1467 {
1468   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1469   PetscErrorCode             ierr;
1470 
1471   PetscFunctionBegin;
1472   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1473   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis type: %s\n", ptypes[p->ptype]);CHKERRQ(ierr);
1474   ierr = PetscViewerASCIIPrintf(viewer, "load imbalance ratio %g\n", (double) p->imbalanceRatio);CHKERRQ(ierr);
1475   ierr = PetscViewerASCIIPrintf(viewer, "debug flag %D\n", p->debugFlag);CHKERRQ(ierr);
1476   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1477   PetscFunctionReturn(0);
1478 }
1479 
1480 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1481 {
1482   PetscBool      iascii;
1483   PetscErrorCode ierr;
1484 
1485   PetscFunctionBegin;
1486   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1487   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1488   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1489   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1490   PetscFunctionReturn(0);
1491 }
1492 
/* Process -petscpartitioner_parmetis_* options for a ParMetis partitioner.
   Updates the type, imbalance ratio, and debug flag stored in the context. */
static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
  PetscInt                  p_randomSeed = -1; /* TODO: Add this to PetscPartitioner_ParMetis */
  PetscErrorCode            ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");CHKERRQ(ierr);
  ierr = PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-petscpartitioner_parmetis_imbalance_ratio", "Load imbalance ratio limit", "", p->imbalanceRatio, &p->imbalanceRatio, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-petscpartitioner_parmetis_debug", "Debugging flag", "", p->debugFlag, &p->debugFlag, NULL);CHKERRQ(ierr);
  /* The seed is not stored in the context (see TODO above):
     PetscPartitionerPartition_ParMetis re-reads this option from the
     database at partitioning time, so this call only registers the option. */
  ierr = PetscOptionsInt("-petscpartitioner_parmetis_seed", "Random seed", "", p_randomSeed, &p_randomSeed, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1508 
1509 #if defined(PETSC_HAVE_PARMETIS)
1510 #include <parmetis.h>
1511 #endif
1512 
1513 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1514 {
1515 #if defined(PETSC_HAVE_PARMETIS)
1516   PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *) part->data;
1517   MPI_Comm       comm;
1518   PetscSection   section;
1519   PetscInt       nvtxs       = numVertices; /* The number of vertices in full graph */
1520   PetscInt      *vtxdist;                   /* Distribution of vertices across processes */
1521   PetscInt      *xadj        = start;       /* Start of edge list for each vertex */
1522   PetscInt      *adjncy      = adjacency;   /* Edge lists for all vertices */
1523   PetscInt      *vwgt        = NULL;        /* Vertex weights */
1524   PetscInt      *adjwgt      = NULL;        /* Edge weights */
1525   PetscInt       wgtflag     = 0;           /* Indicates which weights are present */
1526   PetscInt       numflag     = 0;           /* Indicates initial offset (0 or 1) */
1527   PetscInt       ncon        = 1;           /* The number of weights per vertex */
1528   PetscInt       metis_ptype = pm->ptype;   /* kway or recursive bisection */
1529   real_t        *tpwgts;                    /* The fraction of vertex weights assigned to each partition */
1530   real_t        *ubvec;                     /* The balance intolerance for vertex weights */
1531   PetscInt       options[64];               /* Options */
1532   /* Outputs */
1533   PetscInt       v, i, *assignment, *points;
1534   PetscMPIInt    size, rank, p;
1535   PetscInt       pm_randomSeed = -1;
1536   PetscErrorCode ierr;
1537 
1538   PetscFunctionBegin;
1539   ierr = PetscOptionsGetInt(((PetscObject)part)->options,((PetscObject)part)->prefix,"-petscpartitioner_parmetis_seed", &pm_randomSeed, NULL);CHKERRQ(ierr);
1540   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1541   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1542   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1543   /* Calculate vertex distribution */
1544   ierr = PetscMalloc5(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
1545   vtxdist[0] = 0;
1546   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1547   for (p = 2; p <= size; ++p) {
1548     vtxdist[p] += vtxdist[p-1];
1549   }
1550   /* Calculate partition weights */
1551   for (p = 0; p < nparts; ++p) {
1552     tpwgts[p] = 1.0/nparts;
1553   }
1554   ubvec[0] = pm->imbalanceRatio;
1555   /* Weight cells by dofs on cell by default */
1556   ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
1557   if (section) {
1558     PetscInt cStart, cEnd, dof;
1559 
1560     ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
1561     for (v = cStart; v < cEnd; ++v) {
1562       ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1563       /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1564       /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1565       if (v-cStart < numVertices) vwgt[v-cStart] = PetscMax(dof, 1);
1566     }
1567   } else {
1568     for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1569   }
1570   wgtflag |= 2; /* have weights on graph vertices */
1571 
1572   if (nparts == 1) {
1573     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1574   } else {
1575     for (p = 0; !vtxdist[p+1] && p < size; ++p);
1576     if (vtxdist[p+1] == vtxdist[size]) {
1577       if (rank == p) {
1578         ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
1579         options[METIS_OPTION_DBGLVL] = pm->debugFlag;
1580         options[METIS_OPTION_SEED]   = pm_randomSeed;
1581         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
1582         if (metis_ptype == 1) {
1583           PetscStackPush("METIS_PartGraphRecursive");
1584           ierr = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1585           PetscStackPop;
1586           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
1587         } else {
1588           /*
1589            It would be nice to activate the two options below, but they would need some actual testing.
1590            - Turning on these options may exercise path of the METIS code that have bugs and may break production runs.
1591            - If CONTIG is set to 1, METIS will exit with error if the graph is disconnected, despite the manual saying the option is ignored in such case.
1592           */
1593           /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
1594           /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
1595           PetscStackPush("METIS_PartGraphKway");
1596           ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1597           PetscStackPop;
1598           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1599         }
1600       }
1601     } else {
1602       options[0] = 1; /*use options */
1603       options[1] = pm->debugFlag;
1604       options[2] = (pm_randomSeed == -1) ? 15 : pm_randomSeed; /* default is GLOBAL_SEED=15 from `libparmetis/defs.h` */
1605       PetscStackPush("ParMETIS_V3_PartKway");
1606       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &comm);
1607       PetscStackPop;
1608       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
1609     }
1610   }
1611   /* Convert to PetscSection+IS */
1612   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1613   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1614   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1615   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1616   for (p = 0, i = 0; p < nparts; ++p) {
1617     for (v = 0; v < nvtxs; ++v) {
1618       if (assignment[v] == p) points[i++] = v;
1619     }
1620   }
1621   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1622   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1623   ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
1624   PetscFunctionReturn(0);
1625 #else
1626   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1627 #endif
1628 }
1629 
1630 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1631 {
1632   PetscFunctionBegin;
1633   part->ops->view           = PetscPartitionerView_ParMetis;
1634   part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
1635   part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
1636   part->ops->partition      = PetscPartitionerPartition_ParMetis;
1637   PetscFunctionReturn(0);
1638 }
1639 
1640 /*MC
1641   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1642 
1643   Level: intermediate
1644 
1645 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1646 M*/
1647 
1648 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1649 {
1650   PetscPartitioner_ParMetis *p;
1651   PetscErrorCode          ierr;
1652 
1653   PetscFunctionBegin;
1654   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1655   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1656   part->data = p;
1657 
1658   p->ptype          = 0;
1659   p->imbalanceRatio = 1.05;
1660   p->debugFlag      = 0;
1661 
1662   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1663   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1664   PetscFunctionReturn(0);
1665 }
1666 
1667 
/* Citation registered (via PetscCitationsRegister) when the PTScotch partitioner is used */
PetscBool PTScotchPartitionercite = PETSC_FALSE;
const char PTScotchPartitionerCitation[] =
  "@article{PTSCOTCH,\n"
  "  author  = {C. Chevalier and F. Pellegrini},\n"
  "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
  "  journal = {Parallel Computing},\n"
  "  volume  = {34},\n"
  "  number  = {6},\n"
  "  pages   = {318--331},\n"
  "  year    = {2008},\n"
  "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
  "}\n";

/* Implementation context for the PTScotch partitioner */
typedef struct {
  PetscInt  strategy;  /* index into PTScotchStrategyList; mapped to a SCOTCH_STRAT* flag by PTScotch_Strategy() */
  PetscReal imbalance; /* load imbalance ratio passed to the Scotch strategy builders */
} PetscPartitioner_PTScotch;

/* Option names for -petscpartitioner_ptscotch_strategy;
   the order matches the case labels in PTScotch_Strategy() */
static const char *const
PTScotchStrategyList[] = {
  "DEFAULT",
  "QUALITY",
  "SPEED",
  "BALANCE",
  "SAFETY",
  "SCALABILITY",
  "RECURSIVE",
  "REMAP"
};
1697 
1698 #if defined(PETSC_HAVE_PTSCOTCH)
1699 
1700 EXTERN_C_BEGIN
1701 #include <ptscotch.h>
1702 EXTERN_C_END
1703 
1704 #define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0)
1705 
1706 static int PTScotch_Strategy(PetscInt strategy)
1707 {
1708   switch (strategy) {
1709   case  0: return SCOTCH_STRATDEFAULT;
1710   case  1: return SCOTCH_STRATQUALITY;
1711   case  2: return SCOTCH_STRATSPEED;
1712   case  3: return SCOTCH_STRATBALANCE;
1713   case  4: return SCOTCH_STRATSAFETY;
1714   case  5: return SCOTCH_STRATSCALABILITY;
1715   case  6: return SCOTCH_STRATRECURSIVE;
1716   case  7: return SCOTCH_STRATREMAP;
1717   default: return SCOTCH_STRATDEFAULT;
1718   }
1719 }
1720 
1721 
/* Partition a graph held entirely on this process into nparts parts with the
   sequential Scotch interface.  xadj/adjncy give the CSR adjacency of the n
   vertices, vtxwgt/adjwgt are optional vertex/edge weights (NULL allowed),
   and part[] receives the target part number for each vertex. */
static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
                                             SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[])
{
  SCOTCH_Graph   grafdat;
  SCOTCH_Strat   stradat;
  SCOTCH_Num     vertnbr = n;       /* number of vertices */
  SCOTCH_Num     edgenbr = xadj[n]; /* total adjacency-list length */
  SCOTCH_Num*    velotab = vtxwgt;
  SCOTCH_Num*    edlotab = adjwgt;
  SCOTCH_Num     flagval = strategy;
  double         kbalval = imbalance;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  {
    /* Allow the user to disable vertex weights from the command line.
       NOTE(review): the option name spells "petscpartititoner"; this looks
       like a typo for "petscpartitioner", but the MPI path uses the same
       spelling, so confirm with users before renaming. */
    PetscBool flg = PETSC_TRUE;
    ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
    if (!flg) velotab = NULL;
  }
  ierr = SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
#if defined(PETSC_USE_DEBUG)
  /* Validate graph consistency only in debug builds; the check is expensive */
  ierr = SCOTCH_graphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
#endif
  ierr = SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
  SCOTCH_stratExit(&stradat);
  SCOTCH_graphExit(&grafdat);
  PetscFunctionReturn(0);
}
1753 
1754 static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1755                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[], MPI_Comm comm)
1756 {
1757   PetscMPIInt     procglbnbr;
1758   PetscMPIInt     proclocnum;
1759   SCOTCH_Arch     archdat;
1760   SCOTCH_Dgraph   grafdat;
1761   SCOTCH_Dmapping mappdat;
1762   SCOTCH_Strat    stradat;
1763   SCOTCH_Num      vertlocnbr;
1764   SCOTCH_Num      edgelocnbr;
1765   SCOTCH_Num*     veloloctab = vtxwgt;
1766   SCOTCH_Num*     edloloctab = adjwgt;
1767   SCOTCH_Num      flagval = strategy;
1768   double          kbalval = imbalance;
1769   PetscErrorCode  ierr;
1770 
1771   PetscFunctionBegin;
1772   {
1773     PetscBool flg = PETSC_TRUE;
1774     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
1775     if (!flg) veloloctab = NULL;
1776   }
1777   ierr = MPI_Comm_size(comm, &procglbnbr);CHKERRQ(ierr);
1778   ierr = MPI_Comm_rank(comm, &proclocnum);CHKERRQ(ierr);
1779   vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
1780   edgelocnbr = xadj[vertlocnbr];
1781 
1782   ierr = SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr);
1783   ierr = SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr);
1784 #if defined(PETSC_USE_DEBUG)
1785   ierr = SCOTCH_dgraphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1786 #endif
1787   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1788   ierr = SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRQ(ierr);
1789   ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
1790   ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
1791   ierr = SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr);
1792   ierr = SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr);
1793   SCOTCH_dgraphMapExit(&grafdat, &mappdat);
1794   SCOTCH_archExit(&archdat);
1795   SCOTCH_stratExit(&stradat);
1796   SCOTCH_dgraphExit(&grafdat);
1797   PetscFunctionReturn(0);
1798 }
1799 
1800 #endif /* PETSC_HAVE_PTSCOTCH */
1801 
1802 static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
1803 {
1804   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1805   PetscErrorCode             ierr;
1806 
1807   PetscFunctionBegin;
1808   ierr = PetscFree(p);CHKERRQ(ierr);
1809   PetscFunctionReturn(0);
1810 }
1811 
1812 static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer)
1813 {
1814   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1815   PetscErrorCode            ierr;
1816 
1817   PetscFunctionBegin;
1818   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1819   ierr = PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);CHKERRQ(ierr);
1820   ierr = PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n",(double)p->imbalance);CHKERRQ(ierr);
1821   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1822   PetscFunctionReturn(0);
1823 }
1824 
1825 static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
1826 {
1827   PetscBool      iascii;
1828   PetscErrorCode ierr;
1829 
1830   PetscFunctionBegin;
1831   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1832   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1833   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1834   if (iascii) {ierr = PetscPartitionerView_PTScotch_Ascii(part, viewer);CHKERRQ(ierr);}
1835   PetscFunctionReturn(0);
1836 }
1837 
1838 static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1839 {
1840   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1841   const char *const         *slist = PTScotchStrategyList;
1842   PetscInt                  nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0]));
1843   PetscBool                 flag;
1844   PetscErrorCode            ierr;
1845 
1846   PetscFunctionBegin;
1847   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");CHKERRQ(ierr);
1848   ierr = PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);CHKERRQ(ierr);
1849   ierr = PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);CHKERRQ(ierr);
1850   ierr = PetscOptionsTail();CHKERRQ(ierr);
1851   PetscFunctionReturn(0);
1852 }
1853 
1854 static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1855 {
1856 #if defined(PETSC_HAVE_PTSCOTCH)
1857   MPI_Comm       comm       = PetscObjectComm((PetscObject)part);
1858   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1859   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1860   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1861   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1862   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1863   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1864   PetscInt       v, i, *assignment, *points;
1865   PetscMPIInt    size, rank, p;
1866   PetscErrorCode ierr;
1867 
1868   PetscFunctionBegin;
1869   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1870   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1871   ierr = PetscMalloc2(size+1,&vtxdist,PetscMax(nvtxs,1),&assignment);CHKERRQ(ierr);
1872 
1873   /* Calculate vertex distribution */
1874   vtxdist[0] = 0;
1875   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1876   for (p = 2; p <= size; ++p) {
1877     vtxdist[p] += vtxdist[p-1];
1878   }
1879 
1880   if (nparts == 1) {
1881     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1882   } else {
1883     PetscSection section;
1884     /* Weight cells by dofs on cell by default */
1885     ierr = PetscMalloc1(PetscMax(nvtxs,1),&vwgt);CHKERRQ(ierr);
1886     ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
1887     if (section) {
1888       PetscInt vStart, vEnd, dof;
1889       ierr = DMPlexGetHeightStratum(dm, 0, &vStart, &vEnd);CHKERRQ(ierr);
1890       for (v = vStart; v < vEnd; ++v) {
1891         ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1892         /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1893         /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1894         if (v-vStart < numVertices) vwgt[v-vStart] = PetscMax(dof, 1);
1895       }
1896     } else {
1897       for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1898     }
1899     {
1900       PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
1901       int                       strat = PTScotch_Strategy(pts->strategy);
1902       double                    imbal = (double)pts->imbalance;
1903 
1904       for (p = 0; !vtxdist[p+1] && p < size; ++p);
1905       if (vtxdist[p+1] == vtxdist[size]) {
1906         if (rank == p) {
1907           ierr = PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, assignment);CHKERRQ(ierr);
1908         }
1909       } else {
1910         ierr = PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, assignment, comm);CHKERRQ(ierr);
1911       }
1912     }
1913     ierr = PetscFree(vwgt);CHKERRQ(ierr);
1914   }
1915 
1916   /* Convert to PetscSection+IS */
1917   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1918   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1919   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1920   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1921   for (p = 0, i = 0; p < nparts; ++p) {
1922     for (v = 0; v < nvtxs; ++v) {
1923       if (assignment[v] == p) points[i++] = v;
1924     }
1925   }
1926   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1927   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1928 
1929   ierr = PetscFree2(vtxdist,assignment);CHKERRQ(ierr);
1930   PetscFunctionReturn(0);
1931 #else
1932   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
1933 #endif
1934 }
1935 
1936 static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
1937 {
1938   PetscFunctionBegin;
1939   part->ops->view           = PetscPartitionerView_PTScotch;
1940   part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
1941   part->ops->partition      = PetscPartitionerPartition_PTScotch;
1942   part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
1943   PetscFunctionReturn(0);
1944 }
1945 
1946 /*MC
1947   PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library
1948 
1949   Level: intermediate
1950 
1951 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1952 M*/
1953 
1954 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
1955 {
1956   PetscPartitioner_PTScotch *p;
1957   PetscErrorCode          ierr;
1958 
1959   PetscFunctionBegin;
1960   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1961   ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
1962   part->data = p;
1963 
1964   p->strategy  = 0;
1965   p->imbalance = 0.01;
1966 
1967   ierr = PetscPartitionerInitialize_PTScotch(part);CHKERRQ(ierr);
1968   ierr = PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);CHKERRQ(ierr);
1969   PetscFunctionReturn(0);
1970 }
1971 
1972 
1973 /*@
1974   DMPlexGetPartitioner - Get the mesh partitioner
1975 
1976   Not collective
1977 
1978   Input Parameter:
1979 . dm - The DM
1980 
1981   Output Parameter:
1982 . part - The PetscPartitioner
1983 
1984   Level: developer
1985 
1986   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1987 
.seealso: DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1989 @*/
1990 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1991 {
1992   DM_Plex       *mesh = (DM_Plex *) dm->data;
1993 
1994   PetscFunctionBegin;
1995   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1996   PetscValidPointer(part, 2);
1997   *part = mesh->partitioner;
1998   PetscFunctionReturn(0);
1999 }
2000 
2001 /*@
2002   DMPlexSetPartitioner - Set the mesh partitioner
2003 
2004   logically collective on dm and part
2005 
2006   Input Parameters:
2007 + dm - The DM
2008 - part - The partitioner
2009 
2010   Level: developer
2011 
2012   Note: Any existing PetscPartitioner will be destroyed.
2013 
.seealso: DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
2015 @*/
PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
{
  DM_Plex       *mesh = (DM_Plex *) dm->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
  /* Take the new reference before destroying the old one so that setting the
     already-installed partitioner (part == mesh->partitioner) does not free it */
  ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
  ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
  mesh->partitioner = part;
  PetscFunctionReturn(0);
}
2029 
/* Add to the hash set ht the points related to 'point' through the tree
   structure of a non-conforming mesh: ancestors (and their closures) when
   'up' is PETSC_TRUE, descendants when 'down' is PETSC_TRUE.  Recurses so the
   whole ancestor/descendant chain is collected. */
static PetscErrorCode DMPlexAddClosure_Tree(DM dm, PetscHSetI ht, PetscInt point, PetscBool up, PetscBool down)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (up) {
    PetscInt parent;

    ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
    if (parent != point) { /* a point with no parent is reported as its own parent */
      PetscInt closureSize, *closure = NULL, i;

      /* Add the parent's whole closure, and recurse upward from each closure point */
      ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
      for (i = 0; i < closureSize; i++) {
        PetscInt cpoint = closure[2*i]; /* closure stores (point, orientation) pairs */

        ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
        ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
      }
      ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
    }
  }
  if (down) {
    PetscInt numChildren;
    const PetscInt *children;

    ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
    if (numChildren) {
      PetscInt i;

      /* Add every child and recurse downward from each */
      for (i = 0; i < numChildren; i++) {
        PetscInt cpoint = children[i];

        ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
        ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}
2070 
2071 PETSC_INTERN PetscErrorCode DMPlexPartitionLabelClosure_Private(DM,DMLabel,PetscInt,PetscInt,const PetscInt[],IS*);
2072 
/* Compute the union of the transitive closures of the given points (plus
   their tree ancestors/descendants on non-conforming meshes) and return it as
   a sorted IS.  Note: the 'label' and 'rank' arguments are not used in this
   routine's body; presumably they are kept so the interface matches callers
   that select points per rank (see DMPlexPartitionLabelClosure). */
PetscErrorCode DMPlexPartitionLabelClosure_Private(DM dm, DMLabel label, PetscInt rank, PetscInt numPoints, const PetscInt points[], IS *closureIS)
{
  DM_Plex        *mesh = (DM_Plex *)dm->data;
  /* Tree handling is only needed when the mesh actually stores parent/child relations */
  PetscBool      hasTree = (mesh->parentSection || mesh->childSection) ? PETSC_TRUE : PETSC_FALSE;
  PetscInt       *closure = NULL, closureSize, nelems, *elems, off = 0, p, c;
  PetscHSetI     ht;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
  /* Pre-size the set; 16 closure points per input point is a heuristic guess */
  ierr = PetscHSetIResize(ht, numPoints*16);CHKERRQ(ierr);
  for (p = 0; p < numPoints; ++p) {
    ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
    for (c = 0; c < closureSize*2; c += 2) { /* closure stores (point, orientation) pairs */
      ierr = PetscHSetIAdd(ht, closure[c]);CHKERRQ(ierr);
      if (hasTree) {ierr = DMPlexAddClosure_Tree(dm, ht, closure[c], PETSC_TRUE, PETSC_TRUE);CHKERRQ(ierr);}
    }
  }
  /* The closure work array is reused across iterations; release it once */
  if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure);CHKERRQ(ierr);}
  ierr = PetscHSetIGetSize(ht, &nelems);CHKERRQ(ierr);
  ierr = PetscMalloc1(nelems, &elems);CHKERRQ(ierr);
  ierr = PetscHSetIGetElems(ht, &off, elems);CHKERRQ(ierr);
  ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
  ierr = PetscSortInt(nelems, elems);CHKERRQ(ierr);
  /* The IS takes ownership of elems */
  ierr = ISCreateGeneral(PETSC_COMM_SELF, nelems, elems, PETSC_OWN_POINTER, closureIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2100 
2101 /*@
2102   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
2103 
2104   Input Parameters:
2105 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2107 
2108   Level: developer
2109 
2110 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2111 @*/
PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS, closureIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* For each rank value present in the label, replace its stratum by the
     closure of that stratum's points */
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];
    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = DMPlexPartitionLabelClosure_Private(dm, label, rank, numPoints, points, &closureIS);CHKERRQ(ierr);
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
    /* Overwrite the stratum with the closed point set */
    ierr = DMLabelSetStratumIS(label, rank, closureIS);CHKERRQ(ierr);
    ierr = ISDestroy(&closureIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2138 
2139 /*@
2140   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
2141 
2142   Input Parameters:
2143 + dm     - The DM
2144 - label  - DMLabel assinging ranks to remote roots
2145 
2146   Level: developer
2147 
2148 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2149 @*/
PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, a, adjSize;
  PetscInt       *adj = NULL; /* work array reused (and grown) by DMPlexGetAdjacency */
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* For each rank value present in the label, add the adjacency of every
     point in that rank's stratum back into the label under the same rank */
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];

    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      adjSize = PETSC_DETERMINE; /* let DMPlexGetAdjacency size the result */
      ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
      for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  ierr = PetscFree(adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2181 
2182 /*@
2183   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
2184 
2185   Input Parameters:
2186 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2188 
2189   Level: developer
2190 
2191   Note: This is required when generating multi-level overlaps to capture
2192   overlap points from non-neighbouring partitions.
2193 
2194 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2195 @*/
PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
{
  MPI_Comm        comm;
  PetscMPIInt     rank;
  PetscSF         sfPoint;
  DMLabel         lblRoots, lblLeaves;
  IS              rankIS, pointIS;
  const PetscInt *ranks;
  PetscInt        numRanks, r;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* Pull point contributions from remote leaves into local roots */
  ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt remoteRank = ranks[r];
    /* Points already labeled with our own rank need no propagation */
    if (remoteRank == rank) continue;
    ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
    ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
  /* Push point contributions from roots into remote leaves */
  ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt remoteRank = ranks[r];
    /* Skip our own rank here as well */
    if (remoteRank == rank) continue;
    ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
    ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2243 
2244 /*@
2245   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
2246 
2247   Input Parameters:
2248 + dm        - The DM
2249 . rootLabel - DMLabel assinging ranks to local roots
2250 . processSF - A star forest mapping into the local index on each remote rank
2251 
2252   Output Parameter:
2253 - leafLabel - DMLabel assinging ranks to remote roots
2254 
2255   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
2256   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
2257 
2258   Level: developer
2259 
2260 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2261 @*/
2262 PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
2263 {
2264   MPI_Comm           comm;
2265   PetscMPIInt        rank, size, r;
2266   PetscInt           p, n, numNeighbors, numPoints, dof, off, rootSize, l, nleaves, leafSize;
2267   PetscSF            sfPoint;
2268   PetscSection       rootSection;
2269   PetscSFNode       *rootPoints, *leafPoints;
2270   const PetscSFNode *remote;
2271   const PetscInt    *local, *neighbors;
2272   IS                 valueIS;
2273   PetscBool          mpiOverflow = PETSC_FALSE;
2274   PetscErrorCode     ierr;
2275 
2276   PetscFunctionBegin;
2277   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2278   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2279   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2280   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
2281 
2282   /* Convert to (point, rank) and use actual owners */
2283   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
2284   ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
2285   ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
2286   ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
2287   ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
2288   for (n = 0; n < numNeighbors; ++n) {
2289     ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
2290     ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
2291   }
2292   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
2293   ierr = PetscSectionGetStorageSize(rootSection, &rootSize);CHKERRQ(ierr);
2294   ierr = PetscMalloc1(rootSize, &rootPoints);CHKERRQ(ierr);
2295   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
2296   for (n = 0; n < numNeighbors; ++n) {
2297     IS              pointIS;
2298     const PetscInt *points;
2299 
2300     ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
2301     ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
2302     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2303     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2304     for (p = 0; p < numPoints; ++p) {
2305       if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
2306       else       {l = -1;}
2307       if (l >= 0) {rootPoints[off+p] = remote[l];}
2308       else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
2309     }
2310     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2311     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2312   }
2313 
2314   /* Try to communicate overlap using All-to-All */
2315   if (!processSF) {
2316     PetscInt64  counter = 0;
2317     PetscBool   locOverflow = PETSC_FALSE;
2318     PetscMPIInt *scounts, *sdispls, *rcounts, *rdispls;
2319 
2320     ierr = PetscCalloc4(size, &scounts, size, &sdispls, size, &rcounts, size, &rdispls);CHKERRQ(ierr);
2321     for (n = 0; n < numNeighbors; ++n) {
2322       ierr = PetscSectionGetDof(rootSection, neighbors[n], &dof);CHKERRQ(ierr);
2323       ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
2324 #if defined(PETSC_USE_64BIT_INDICES)
2325       if (dof > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
2326       if (off > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
2327 #endif
2328       scounts[neighbors[n]] = (PetscMPIInt) dof;
2329       sdispls[neighbors[n]] = (PetscMPIInt) off;
2330     }
2331     ierr = MPI_Alltoall(scounts, 1, MPI_INT, rcounts, 1, MPI_INT, comm);CHKERRQ(ierr);
2332     for (r = 0; r < size; ++r) { rdispls[r] = (int)counter; counter += rcounts[r]; }
2333     if (counter > PETSC_MPI_INT_MAX) locOverflow = PETSC_TRUE;
2334     ierr = MPI_Allreduce(&locOverflow, &mpiOverflow, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
2335     if (!mpiOverflow) {
2336       leafSize = (PetscInt) counter;
2337       ierr = PetscMalloc1(leafSize, &leafPoints);CHKERRQ(ierr);
2338       ierr = MPI_Alltoallv(rootPoints, scounts, sdispls, MPIU_2INT, leafPoints, rcounts, rdispls, MPIU_2INT, comm);CHKERRQ(ierr);
2339     }
2340     ierr = PetscFree4(scounts, sdispls, rcounts, rdispls);CHKERRQ(ierr);
2341   }
2342 
2343   /* Communicate overlap using process star forest */
2344   if (processSF || mpiOverflow) {
2345     PetscSF      procSF;
2346     PetscSFNode  *remote;
2347     PetscSection leafSection;
2348 
2349     if (processSF) {
2350       ierr = PetscObjectReference((PetscObject)processSF);CHKERRQ(ierr);
2351       procSF = processSF;
2352     } else {
2353       ierr = PetscMalloc1(size, &remote);CHKERRQ(ierr);
2354       for (r = 0; r < size; ++r) { remote[r].rank  = r; remote[r].index = rank; }
2355       ierr = PetscSFCreate(comm, &procSF);CHKERRQ(ierr);
2356       ierr = PetscSFSetGraph(procSF, size, size, NULL, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
2357     }
2358 
2359     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dm), &leafSection);CHKERRQ(ierr);
2360     ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
2361     ierr = PetscSectionGetStorageSize(leafSection, &leafSize);CHKERRQ(ierr);
2362     ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
2363     ierr = PetscSFDestroy(&procSF);CHKERRQ(ierr);
2364   }
2365 
2366   for (p = 0; p < leafSize; p++) {
2367     ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
2368   }
2369 
2370   ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
2371   ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
2372   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
2373   ierr = PetscFree(rootPoints);CHKERRQ(ierr);
2374   ierr = PetscFree(leafPoints);CHKERRQ(ierr);
2375   PetscFunctionReturn(0);
2376 }
2377 
2378 /*@
2379   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
2380 
2381   Input Parameters:
2382 + dm    - The DM
2383 . label - DMLabel assinging ranks to remote roots
2384 
2385   Output Parameter:
2386 - sf    - The star forest communication context encapsulating the defined mapping
2387 
2388   Note: The incoming label is a receiver mapping of remote points to their parent rank.
2389 
2390   Level: developer
2391 
2392 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
2393 @*/
2394 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
2395 {
2396   PetscMPIInt     rank, size;
2397   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
2398   PetscSFNode    *remotePoints;
2399   IS              remoteRootIS;
2400   const PetscInt *remoteRoots;
2401   PetscErrorCode ierr;
2402 
2403   PetscFunctionBegin;
2404   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
2405   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
2406 
2407   for (numRemote = 0, n = 0; n < size; ++n) {
2408     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
2409     numRemote += numPoints;
2410   }
2411   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
2412   /* Put owned points first */
2413   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
2414   if (numPoints > 0) {
2415     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
2416     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2417     for (p = 0; p < numPoints; p++) {
2418       remotePoints[idx].index = remoteRoots[p];
2419       remotePoints[idx].rank = rank;
2420       idx++;
2421     }
2422     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2423     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
2424   }
2425   /* Now add remote points */
2426   for (n = 0; n < size; ++n) {
2427     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
2428     if (numPoints <= 0 || n == rank) continue;
2429     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
2430     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2431     for (p = 0; p < numPoints; p++) {
2432       remotePoints[idx].index = remoteRoots[p];
2433       remotePoints[idx].rank = n;
2434       idx++;
2435     }
2436     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2437     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
2438   }
2439   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
2440   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
2441   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
2442   PetscFunctionReturn(0);
2443 }
2444