xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 6bb9daa836dec9a2b9832f99eac689784c17c492)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
/* Class id for PetscPartitioner objects (registered during package initialization) */
PetscClassId PETSCPARTITIONER_CLASSID = 0;

/* Registry of partitioner implementations, filled by PetscPartitionerRegister();
   the flag records whether the built-in implementations have been registered yet */
PetscFunctionList PetscPartitionerList              = NULL;
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
7 
8 PetscBool ChacoPartitionercite = PETSC_FALSE;
9 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
10                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
11                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
12                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
13                                "  isbn      = {0-89791-816-9},\n"
14                                "  pages     = {28},\n"
15                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
16                                "  publisher = {ACM Press},\n"
17                                "  address   = {New York},\n"
18                                "  year      = {1995}\n}\n";
19 
/* Citation for the ParMetis partitioner; the flag tracks whether it has been registered */
PetscBool ParMetisPartitionercite = PETSC_FALSE;
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";
28 
/*@C
  DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner

  Input Parameters:
+ dm      - The mesh DM dm
- height  - Height of the strata from which to construct the graph

  Output Parameters:
+ numVertices     - Number of vertices in the graph
. offsets         - Point offsets in the graph
. adjacency       - Point connectivity in the graph
- globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.

  Notes:
  The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
  DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: developer

.seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
@*/
PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
{
  PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
  PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
  IS             cellNumbering;
  const PetscInt *cellNum;
  PetscBool      useCone, useClosure;
  PetscSection   section;
  PetscSegBuffer adjBuffer;
  PetscSF        sfPoint;
  PetscMPIInt    rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* nroots > 0 means the point SF has been set up, i.e. points may be shared between processes */
  ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
  /* Build adjacency graph via a section/segbuffer */
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
  /* Always use FVM adjacency to create partitioner graph */
  /* Save the user's adjacency flags so they can be restored after the graph is built */
  ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
  ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);CHKERRQ(ierr);
  if (globalNumbering) {
    /* Hand the caller an extra reference; we destroy our own reference below */
    ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
    *globalNumbering = cellNumbering;
  }
  ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  for (*numVertices = 0, p = pStart; p < pEnd; p++) {
    /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    adjSize = PETSC_DETERMINE;
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      /* Keep only neighbors in the same stratum, excluding the point itself */
      const PetscInt point = adj[a];
      if (point != p && pStart <= point && point < pEnd) {
        PetscInt *PETSC_RESTRICT pBuf;
        ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
        ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
        *pBuf = point;
      }
    }
    (*numVertices)++;
  }
  /* Restore the adjacency flags saved above */
  ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
  /* Derive CSR graph from section/segbuffer */
  ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
  for (idx = 0, p = pStart; p < pEnd; p++) {
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
  }
  vOffsets[*numVertices] = size;
  if (offsets) *offsets = vOffsets;
  ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
  {
    ISLocalToGlobalMapping ltogCells;
    PetscInt n, size, *cells_arr; /* NOTE(review): this "size" shadows the outer "size" declared above */
    /* In parallel, apply a global cell numbering to the graph */
    ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
    /* Convert to positive global cell numbers (negative entries encode ghosts as -(global+1)) */
    for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
    ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
    ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
  }
  if (adjacency) *adjacency = graph;
  /* Clean up */
  ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
  ierr = PetscFree(adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
134 
135 /*@C
136   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
137 
138   Collective
139 
140   Input Arguments:
141 + dm - The DMPlex
142 - cellHeight - The height of mesh points to treat as cells (default should be 0)
143 
144   Output Arguments:
145 + numVertices - The number of local vertices in the graph, or cells in the mesh.
146 . offsets     - The offset to the adjacency list for each cell
147 - adjacency   - The adjacency list for all cells
148 
149   Note: This is suitable for input to a mesh partitioner like ParMetis.
150 
151   Level: advanced
152 
153 .seealso: DMPlexCreate()
154 @*/
155 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
156 {
157   const PetscInt maxFaceCases = 30;
158   PetscInt       numFaceCases = 0;
159   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
160   PetscInt      *off, *adj;
161   PetscInt      *neighborCells = NULL;
162   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
163   PetscErrorCode ierr;
164 
165   PetscFunctionBegin;
166   /* For parallel partitioning, I think you have to communicate supports */
167   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
168   cellDim = dim - cellHeight;
169   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
170   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
171   if (cEnd - cStart == 0) {
172     if (numVertices) *numVertices = 0;
173     if (offsets)   *offsets   = NULL;
174     if (adjacency) *adjacency = NULL;
175     PetscFunctionReturn(0);
176   }
177   numCells  = cEnd - cStart;
178   faceDepth = depth - cellHeight;
179   if (dim == depth) {
180     PetscInt f, fStart, fEnd;
181 
182     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
183     /* Count neighboring cells */
184     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
185     for (f = fStart; f < fEnd; ++f) {
186       const PetscInt *support;
187       PetscInt        supportSize;
188       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
189       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
190       if (supportSize == 2) {
191         PetscInt numChildren;
192 
193         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
194         if (!numChildren) {
195           ++off[support[0]-cStart+1];
196           ++off[support[1]-cStart+1];
197         }
198       }
199     }
200     /* Prefix sum */
201     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
202     if (adjacency) {
203       PetscInt *tmp;
204 
205       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
206       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
207       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
208       /* Get neighboring cells */
209       for (f = fStart; f < fEnd; ++f) {
210         const PetscInt *support;
211         PetscInt        supportSize;
212         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
213         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
214         if (supportSize == 2) {
215           PetscInt numChildren;
216 
217           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
218           if (!numChildren) {
219             adj[tmp[support[0]-cStart]++] = support[1];
220             adj[tmp[support[1]-cStart]++] = support[0];
221           }
222         }
223       }
224 #if defined(PETSC_USE_DEBUG)
225       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
226 #endif
227       ierr = PetscFree(tmp);CHKERRQ(ierr);
228     }
229     if (numVertices) *numVertices = numCells;
230     if (offsets)   *offsets   = off;
231     if (adjacency) *adjacency = adj;
232     PetscFunctionReturn(0);
233   }
234   /* Setup face recognition */
235   if (faceDepth == 1) {
236     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
237 
238     for (c = cStart; c < cEnd; ++c) {
239       PetscInt corners;
240 
241       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
242       if (!cornersSeen[corners]) {
243         PetscInt nFV;
244 
245         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
246         cornersSeen[corners] = 1;
247 
248         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
249 
250         numFaceVertices[numFaceCases++] = nFV;
251       }
252     }
253   }
254   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
255   /* Count neighboring cells */
256   for (cell = cStart; cell < cEnd; ++cell) {
257     PetscInt numNeighbors = PETSC_DETERMINE, n;
258 
259     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
260     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
261     for (n = 0; n < numNeighbors; ++n) {
262       PetscInt        cellPair[2];
263       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
264       PetscInt        meetSize = 0;
265       const PetscInt *meet    = NULL;
266 
267       cellPair[0] = cell; cellPair[1] = neighborCells[n];
268       if (cellPair[0] == cellPair[1]) continue;
269       if (!found) {
270         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
271         if (meetSize) {
272           PetscInt f;
273 
274           for (f = 0; f < numFaceCases; ++f) {
275             if (numFaceVertices[f] == meetSize) {
276               found = PETSC_TRUE;
277               break;
278             }
279           }
280         }
281         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
282       }
283       if (found) ++off[cell-cStart+1];
284     }
285   }
286   /* Prefix sum */
287   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
288 
289   if (adjacency) {
290     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
291     /* Get neighboring cells */
292     for (cell = cStart; cell < cEnd; ++cell) {
293       PetscInt numNeighbors = PETSC_DETERMINE, n;
294       PetscInt cellOffset   = 0;
295 
296       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
297       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
298       for (n = 0; n < numNeighbors; ++n) {
299         PetscInt        cellPair[2];
300         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
301         PetscInt        meetSize = 0;
302         const PetscInt *meet    = NULL;
303 
304         cellPair[0] = cell; cellPair[1] = neighborCells[n];
305         if (cellPair[0] == cellPair[1]) continue;
306         if (!found) {
307           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
308           if (meetSize) {
309             PetscInt f;
310 
311             for (f = 0; f < numFaceCases; ++f) {
312               if (numFaceVertices[f] == meetSize) {
313                 found = PETSC_TRUE;
314                 break;
315               }
316             }
317           }
318           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
319         }
320         if (found) {
321           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
322           ++cellOffset;
323         }
324       }
325     }
326   }
327   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
328   if (numVertices) *numVertices = numCells;
329   if (offsets)   *offsets   = off;
330   if (adjacency) *adjacency = adj;
331   PetscFunctionReturn(0);
332 }
333 
334 /*@C
335   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
336 
337   Not Collective
338 
339   Input Parameters:
340 + name        - The name of a new user-defined creation routine
341 - create_func - The creation routine itself
342 
343   Notes:
344   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
345 
346   Sample usage:
347 .vb
348     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
349 .ve
350 
351   Then, your PetscPartitioner type can be chosen with the procedural interface via
352 .vb
353     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
354     PetscPartitionerSetType(PetscPartitioner, "my_part");
355 .ve
356    or at runtime via the option
357 .vb
358     -petscpartitioner_type my_part
359 .ve
360 
361   Level: advanced
362 
363 .keywords: PetscPartitioner, register
364 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
365 
366 @*/
367 PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
368 {
369   PetscErrorCode ierr;
370 
371   PetscFunctionBegin;
372   ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
373   PetscFunctionReturn(0);
374 }
375 
376 /*@C
377   PetscPartitionerSetType - Builds a particular PetscPartitioner
378 
379   Collective on PetscPartitioner
380 
381   Input Parameters:
382 + part - The PetscPartitioner object
383 - name - The kind of partitioner
384 
385   Options Database Key:
386 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
387 
388   Level: intermediate
389 
390 .keywords: PetscPartitioner, set, type
391 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
392 @*/
393 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
394 {
395   PetscErrorCode (*r)(PetscPartitioner);
396   PetscBool      match;
397   PetscErrorCode ierr;
398 
399   PetscFunctionBegin;
400   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
401   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
402   if (match) PetscFunctionReturn(0);
403 
404   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
405   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
406   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
407 
408   if (part->ops->destroy) {
409     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
410   }
411   ierr = PetscMemzero(part->ops, sizeof(struct _PetscPartitionerOps));CHKERRQ(ierr);
412   ierr = (*r)(part);CHKERRQ(ierr);
413   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
414   PetscFunctionReturn(0);
415 }
416 
417 /*@C
418   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
419 
420   Not Collective
421 
422   Input Parameter:
423 . part - The PetscPartitioner
424 
425   Output Parameter:
426 . name - The PetscPartitioner type name
427 
428   Level: intermediate
429 
430 .keywords: PetscPartitioner, get, type, name
431 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
432 @*/
433 PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
434 {
435   PetscErrorCode ierr;
436 
437   PetscFunctionBegin;
438   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
439   PetscValidPointer(name, 2);
440   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
441   *name = ((PetscObject) part)->type_name;
442   PetscFunctionReturn(0);
443 }
444 
445 /*@C
446   PetscPartitionerView - Views a PetscPartitioner
447 
448   Collective on PetscPartitioner
449 
450   Input Parameter:
451 + part - the PetscPartitioner object to view
452 - v    - the viewer
453 
454   Level: developer
455 
456 .seealso: PetscPartitionerDestroy()
457 @*/
458 PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
459 {
460   PetscErrorCode ierr;
461 
462   PetscFunctionBegin;
463   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
464   if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
465   if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
466   PetscFunctionReturn(0);
467 }
468 
469 static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType)
470 {
471   PetscFunctionBegin;
472   if (!currentType) {
473 #if defined(PETSC_HAVE_CHACO)
474     *defaultType = PETSCPARTITIONERCHACO;
475 #elif defined(PETSC_HAVE_PARMETIS)
476     *defaultType = PETSCPARTITIONERPARMETIS;
477 #elif defined(PETSC_HAVE_PTSCOTCH)
478     *defaultType = PETSCPARTITIONERPTSCOTCH;
479 #else
480     *defaultType = PETSCPARTITIONERSIMPLE;
481 #endif
482   } else {
483     *defaultType = currentType;
484   }
485   PetscFunctionReturn(0);
486 }
487 
/*@
  PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to set options for

  Options Database Key:
. -petscpartitioner_type <type> - Sets the PetscPartitioner type

  Level: developer

.seealso: PetscPartitionerView()
@*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  const char    *defaultType;
  char           name[256];
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  ierr = PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);CHKERRQ(ierr);
  /* PetscObjectOptionsBegin declares PetscOptionsObject, used below and by PetscOptionsEnd() */
  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, sizeof(name), &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
  } else if (!((PetscObject) part)->type_name) {
    /* No type chosen on the command line and none set yet: install the default */
    ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
  }
  /* Let the implementation process its own options */
  if (part->ops->setfromoptions) {
    ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);
  }
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
527 
528 /*@C
529   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
530 
531   Collective on PetscPartitioner
532 
533   Input Parameter:
534 . part - the PetscPartitioner object to setup
535 
536   Level: developer
537 
538 .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
539 @*/
540 PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
541 {
542   PetscErrorCode ierr;
543 
544   PetscFunctionBegin;
545   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
546   if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
547   PetscFunctionReturn(0);
548 }
549 
550 /*@
551   PetscPartitionerDestroy - Destroys a PetscPartitioner object
552 
553   Collective on PetscPartitioner
554 
555   Input Parameter:
556 . part - the PetscPartitioner object to destroy
557 
558   Level: developer
559 
560 .seealso: PetscPartitionerView()
561 @*/
562 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
563 {
564   PetscErrorCode ierr;
565 
566   PetscFunctionBegin;
567   if (!*part) PetscFunctionReturn(0);
568   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
569 
570   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
571   ((PetscObject) (*part))->refct = 0;
572 
573   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
574   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
575   PetscFunctionReturn(0);
576 }
577 
578 /*@
579   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
580 
581   Collective on MPI_Comm
582 
583   Input Parameter:
584 . comm - The communicator for the PetscPartitioner object
585 
586   Output Parameter:
587 . part - The PetscPartitioner object
588 
589   Level: beginner
590 
591 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
592 @*/
593 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
594 {
595   PetscPartitioner p;
596   const char       *partitionerType = NULL;
597   PetscErrorCode   ierr;
598 
599   PetscFunctionBegin;
600   PetscValidPointer(part, 2);
601   *part = NULL;
602   ierr = DMInitializePackage();CHKERRQ(ierr);
603 
604   ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
605   ierr = PetscPartitionerGetDefaultType(NULL,&partitionerType);CHKERRQ(ierr);
606   ierr = PetscPartitionerSetType(p,partitionerType);CHKERRQ(ierr);
607 
608   *part = p;
609   PetscFunctionReturn(0);
610 }
611 
612 /*@
613   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
614 
615   Collective on DM
616 
617   Input Parameters:
618 + part    - The PetscPartitioner
619 - dm      - The mesh DM
620 
621   Output Parameters:
622 + partSection     - The PetscSection giving the division of points by partition
623 - partition       - The list of points by partition
624 
625   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
626 
627   Level: developer
628 
629 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
630 @*/
631 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
632 {
633   PetscMPIInt    size;
634   PetscErrorCode ierr;
635 
636   PetscFunctionBegin;
637   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
638   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
639   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
640   PetscValidPointer(partition, 5);
641   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
642   if (size == 1) {
643     PetscInt *points;
644     PetscInt  cStart, cEnd, c;
645 
646     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
647     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
648     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
649     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
650     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
651     for (c = cStart; c < cEnd; ++c) points[c] = c;
652     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
653     PetscFunctionReturn(0);
654   }
655   if (part->height == 0) {
656     PetscInt numVertices;
657     PetscInt *start     = NULL;
658     PetscInt *adjacency = NULL;
659     IS       globalNumbering;
660 
661     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
662     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
663     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
664     ierr = PetscFree(start);CHKERRQ(ierr);
665     ierr = PetscFree(adjacency);CHKERRQ(ierr);
666     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
667       const PetscInt *globalNum;
668       const PetscInt *partIdx;
669       PetscInt *map, cStart, cEnd;
670       PetscInt *adjusted, i, localSize, offset;
671       IS    newPartition;
672 
673       ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
674       ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
675       ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
676       ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
677       ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
678       ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
679       for (i = cStart, offset = 0; i < cEnd; i++) {
680         if (globalNum[i - cStart] >= 0) map[offset++] = i;
681       }
682       for (i = 0; i < localSize; i++) {
683         adjusted[i] = map[partIdx[i]];
684       }
685       ierr = PetscFree(map);CHKERRQ(ierr);
686       ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
687       ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
688       ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
689       ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
690       ierr = ISDestroy(partition);CHKERRQ(ierr);
691       *partition = newPartition;
692     }
693   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
694   PetscFunctionReturn(0);
695 
696 }
697 
698 static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
699 {
700   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
701   PetscErrorCode          ierr;
702 
703   PetscFunctionBegin;
704   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
705   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
706   ierr = PetscFree(p);CHKERRQ(ierr);
707   PetscFunctionReturn(0);
708 }
709 
710 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
711 {
712   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
713   PetscViewerFormat       format;
714   PetscErrorCode          ierr;
715 
716   PetscFunctionBegin;
717   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
718   ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
719   if (p->random) {
720     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
721     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
722     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
723   }
724   PetscFunctionReturn(0);
725 }
726 
727 static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
728 {
729   PetscBool      iascii;
730   PetscErrorCode ierr;
731 
732   PetscFunctionBegin;
733   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
734   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
735   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
736   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
737   PetscFunctionReturn(0);
738 }
739 
740 static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
741 {
742   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
743   PetscErrorCode          ierr;
744 
745   PetscFunctionBegin;
746   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner Shell Options");CHKERRQ(ierr);
747   ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
748   ierr = PetscOptionsTail();CHKERRQ(ierr);
749   PetscFunctionReturn(0);
750 }
751 
752 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
753 {
754   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
755   PetscInt                np;
756   PetscErrorCode          ierr;
757 
758   PetscFunctionBegin;
759   if (p->random) {
760     PetscRandom r;
761     PetscInt   *sizes, *points, v, p;
762     PetscMPIInt rank;
763 
764     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
765     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
766     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr);
767     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
768     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
769     for (v = 0; v < numVertices; ++v) {points[v] = v;}
770     for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);}
771     for (v = numVertices-1; v > 0; --v) {
772       PetscReal val;
773       PetscInt  w, tmp;
774 
775       ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));CHKERRQ(ierr);
776       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
777       w    = PetscFloorReal(val);
778       tmp       = points[v];
779       points[v] = points[w];
780       points[w] = tmp;
781     }
782     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
783     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
784     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
785   }
786   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
787   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
788   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
789   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
790   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
791   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
792   *partition = p->partition;
793   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
794   PetscFunctionReturn(0);
795 }
796 
797 static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
798 {
799   PetscFunctionBegin;
800   part->ops->view           = PetscPartitionerView_Shell;
801   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
802   part->ops->destroy        = PetscPartitionerDestroy_Shell;
803   part->ops->partition      = PetscPartitionerPartition_Shell;
804   PetscFunctionReturn(0);
805 }
806 
807 /*MC
808   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
809 
810   Level: intermediate
811 
812 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
813 M*/
814 
815 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
816 {
817   PetscPartitioner_Shell *p;
818   PetscErrorCode          ierr;
819 
820   PetscFunctionBegin;
821   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
822   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
823   part->data = p;
824 
825   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
826   p->random = PETSC_FALSE;
827   PetscFunctionReturn(0);
828 }
829 
830 /*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
832 
833   Collective on Part
834 
835   Input Parameters:
836 + part     - The PetscPartitioner
837 . size - The number of partitions
838 . sizes    - array of size size (or NULL) providing the number of points in each partition
839 - points   - array of size sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)
840 
841   Level: developer
842 
843   Notes:
844 
845     It is safe to free the sizes and points arrays after use in this routine.
846 
847 .seealso DMPlexDistribute(), PetscPartitionerCreate()
848 @*/
849 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
850 {
851   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
852   PetscInt                proc, numPoints;
853   PetscErrorCode          ierr;
854 
855   PetscFunctionBegin;
856   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
857   if (sizes)  {PetscValidPointer(sizes, 3);}
858   if (points) {PetscValidPointer(points, 4);}
859   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
860   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
861   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
862   ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
863   if (sizes) {
864     for (proc = 0; proc < size; ++proc) {
865       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
866     }
867   }
868   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
869   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
870   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
871   PetscFunctionReturn(0);
872 }
873 
874 /*@
875   PetscPartitionerShellSetRandom - Set the flag to use a random partition
876 
877   Collective on Part
878 
879   Input Parameters:
880 + part   - The PetscPartitioner
881 - random - The flag to use a random partition
882 
883   Level: intermediate
884 
885 .seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
886 @*/
887 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
888 {
889   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
890 
891   PetscFunctionBegin;
892   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
893   p->random = random;
894   PetscFunctionReturn(0);
895 }
896 
897 /*@
898   PetscPartitionerShellGetRandom - get the flag to use a random partition
899 
900   Collective on Part
901 
902   Input Parameter:
903 . part   - The PetscPartitioner
904 
  Output Parameter:
906 . random - The flag to use a random partition
907 
908   Level: intermediate
909 
910 .seealso PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
911 @*/
912 PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
913 {
914   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
915 
916   PetscFunctionBegin;
917   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
918   PetscValidPointer(random, 2);
919   *random = p->random;
920   PetscFunctionReturn(0);
921 }
922 
923 static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
924 {
925   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
926   PetscErrorCode          ierr;
927 
928   PetscFunctionBegin;
929   ierr = PetscFree(p);CHKERRQ(ierr);
930   PetscFunctionReturn(0);
931 }
932 
933 static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
934 {
935   PetscViewerFormat format;
936   PetscErrorCode    ierr;
937 
938   PetscFunctionBegin;
939   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
940   ierr = PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");CHKERRQ(ierr);
941   PetscFunctionReturn(0);
942 }
943 
944 static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
945 {
946   PetscBool      iascii;
947   PetscErrorCode ierr;
948 
949   PetscFunctionBegin;
950   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
951   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
952   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
953   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
954   PetscFunctionReturn(0);
955 }
956 
/* Divide the global vertex set into nparts contiguous, near-equal chunks.
   Sequentially this is a trivial block partition; in parallel each rank
   computes which global parts overlap its contiguous slice of vertices and
   sets the local dof count of partSection accordingly. The returned IS is
   always the identity over the local vertices (no reordering). */
static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)dm);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  /* Identity permutation: vertices keep their local order */
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial: part np gets floor(n/nparts) vertices plus one from the remainder if np < n % nparts */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  }
  else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Prefix-sum the per-rank vertex counts to get global offsets */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    /* This rank owns the global vertex range [myFirst, myLast] (inclusive) */
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      PetscInt rem = nvGlobal % nparts;
      PetscInt pSmall = nvGlobal/nparts;     /* size of a "small" part */
      PetscInt pBig = nvGlobal/nparts + 1;   /* size of a "large" part; the first rem parts are large */


      if (rem) {
        /* Locate the parts containing myFirst/myLast, accounting for the
           first rem parts being one vertex larger than the rest */
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        }
        else {
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        }
        else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      }
      else {
        /* All parts have the same size */
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      /* For each part overlapping our range, store the size of the overlap */
      for (np = firstPart; np <= lastPart; np++) {
        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1030 
1031 static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1032 {
1033   PetscFunctionBegin;
1034   part->ops->view      = PetscPartitionerView_Simple;
1035   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1036   part->ops->partition = PetscPartitionerPartition_Simple;
1037   PetscFunctionReturn(0);
1038 }
1039 
1040 /*MC
1041   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1042 
1043   Level: intermediate
1044 
1045 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1046 M*/
1047 
1048 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1049 {
1050   PetscPartitioner_Simple *p;
1051   PetscErrorCode           ierr;
1052 
1053   PetscFunctionBegin;
1054   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1055   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1056   part->data = p;
1057 
1058   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1059   PetscFunctionReturn(0);
1060 }
1061 
1062 static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1063 {
1064   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1065   PetscErrorCode          ierr;
1066 
1067   PetscFunctionBegin;
1068   ierr = PetscFree(p);CHKERRQ(ierr);
1069   PetscFunctionReturn(0);
1070 }
1071 
1072 static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
1073 {
1074   PetscViewerFormat format;
1075   PetscErrorCode    ierr;
1076 
1077   PetscFunctionBegin;
1078   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1079   ierr = PetscViewerASCIIPrintf(viewer, "Gather Graph Partitioner:\n");CHKERRQ(ierr);
1080   PetscFunctionReturn(0);
1081 }
1082 
1083 static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1084 {
1085   PetscBool      iascii;
1086   PetscErrorCode ierr;
1087 
1088   PetscFunctionBegin;
1089   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1090   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1091   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1092   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1093   PetscFunctionReturn(0);
1094 }
1095 
1096 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1097 {
1098   PetscInt       np;
1099   PetscErrorCode ierr;
1100 
1101   PetscFunctionBegin;
1102   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1103   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1104   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1105   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1106   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1107   PetscFunctionReturn(0);
1108 }
1109 
1110 static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1111 {
1112   PetscFunctionBegin;
1113   part->ops->view      = PetscPartitionerView_Gather;
1114   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1115   part->ops->partition = PetscPartitionerPartition_Gather;
1116   PetscFunctionReturn(0);
1117 }
1118 
1119 /*MC
1120   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1121 
1122   Level: intermediate
1123 
1124 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1125 M*/
1126 
1127 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1128 {
1129   PetscPartitioner_Gather *p;
1130   PetscErrorCode           ierr;
1131 
1132   PetscFunctionBegin;
1133   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1134   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1135   part->data = p;
1136 
1137   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1138   PetscFunctionReturn(0);
1139 }
1140 
1141 
1142 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1143 {
1144   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1145   PetscErrorCode          ierr;
1146 
1147   PetscFunctionBegin;
1148   ierr = PetscFree(p);CHKERRQ(ierr);
1149   PetscFunctionReturn(0);
1150 }
1151 
1152 static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1153 {
1154   PetscViewerFormat format;
1155   PetscErrorCode    ierr;
1156 
1157   PetscFunctionBegin;
1158   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1159   ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
1160   PetscFunctionReturn(0);
1161 }
1162 
1163 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1164 {
1165   PetscBool      iascii;
1166   PetscErrorCode ierr;
1167 
1168   PetscFunctionBegin;
1169   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1170   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1171   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1172   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1173   PetscFunctionReturn(0);
1174 }
1175 
1176 #if defined(PETSC_HAVE_CHACO)
1177 #if defined(PETSC_HAVE_UNISTD_H)
1178 #include <unistd.h>
1179 #endif
1180 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1181 #include <chaco.h>
1182 #else
1183 /* Older versions of Chaco do not have an include file */
1184 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1185                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1186                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1187                        int mesh_dims[3], double *goal, int global_method, int local_method,
1188                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1189 #endif
1190 extern int FREE_GRAPH;
1191 #endif
1192 
/* Partition a (serial) graph with the Chaco library.
   The graph must live entirely on one process (checked in debug builds).
   Chaco expects 1-based adjacency, so the input arrays are shifted in place
   before the call and restored afterwards. Chaco also prints diagnostics to
   stdout, which is temporarily redirected into a pipe so errors can be
   reported through SETERRQ. Outputs: partSection (points per part) and
   *partition (points grouped by part). */
static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int           *assignment;              /* Output partition */
#else
  short int     *assignment;              /* Output partition */
#endif
  int            fd_stdout, fd_pipe[2];   /* saved stdout and redirection pipe */
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
#if defined (PETSC_USE_DEBUG)
  {
    /* Verify the graph is not distributed: at most one rank may have vertices */
    int ival,isum;
    PetscBool distributed;

    ival = (numVertices > 0);
    ierr = MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);CHKERRQ(ierr);
    distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
    if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
  }
#endif
  if (!numVertices) {
    /* Nothing to partition locally: produce an empty, well-formed result */
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco uses 1-based vertex numbering; shift adjacency up (restored below) */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  /* interface() is Chaco's top-level entry point; nonzero return means failure */
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    /* Drain Chaco's stdout output and restore the original stdout */
    char msgLog[10000];
    int  count;

    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#else
  if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  /* Group vertex numbers by their assigned part */
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Restore the caller's 0-based adjacency */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1313 
1314 static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1315 {
1316   PetscFunctionBegin;
1317   part->ops->view      = PetscPartitionerView_Chaco;
1318   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1319   part->ops->partition = PetscPartitionerPartition_Chaco;
1320   PetscFunctionReturn(0);
1321 }
1322 
1323 /*MC
1324   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1325 
1326   Level: intermediate
1327 
1328 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1329 M*/
1330 
1331 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1332 {
1333   PetscPartitioner_Chaco *p;
1334   PetscErrorCode          ierr;
1335 
1336   PetscFunctionBegin;
1337   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1338   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1339   part->data = p;
1340 
1341   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1342   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1343   PetscFunctionReturn(0);
1344 }
1345 
1346 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1347 {
1348   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1349   PetscErrorCode             ierr;
1350 
1351   PetscFunctionBegin;
1352   ierr = PetscFree(p);CHKERRQ(ierr);
1353   PetscFunctionReturn(0);
1354 }
1355 
1356 static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1357 {
1358   PetscViewerFormat format;
1359   PetscErrorCode    ierr;
1360 
1361   PetscFunctionBegin;
1362   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1363   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
1364   PetscFunctionReturn(0);
1365 }
1366 
1367 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1368 {
1369   PetscBool      iascii;
1370   PetscErrorCode ierr;
1371 
1372   PetscFunctionBegin;
1373   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1374   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1375   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1376   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1377   PetscFunctionReturn(0);
1378 }
1379 
1380 #if defined(PETSC_HAVE_PARMETIS)
1381 #include <parmetis.h>
1382 #endif
1383 
1384 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1385 {
1386 #if defined(PETSC_HAVE_PARMETIS)
1387   MPI_Comm       comm;
1388   PetscSection   section;
1389   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1390   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1391   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1392   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1393   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1394   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1395   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1396   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1397   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1398   real_t        *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1399   real_t        *ubvec;                    /* The balance intolerance for vertex weights */
1400   PetscInt       options[64];              /* Options */
1401   /* Outputs */
1402   PetscInt       edgeCut;                  /* The number of edges cut by the partition */
1403   PetscInt       v, i, *assignment, *points;
1404   PetscMPIInt    size, rank, p;
1405   PetscErrorCode ierr;
1406 
1407   PetscFunctionBegin;
1408   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1409   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1410   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1411   options[0] = 0; /* Use all defaults */
1412   /* Calculate vertex distribution */
1413   ierr = PetscMalloc5(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
1414   vtxdist[0] = 0;
1415   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1416   for (p = 2; p <= size; ++p) {
1417     vtxdist[p] += vtxdist[p-1];
1418   }
1419   /* Calculate weights */
1420   for (p = 0; p < nparts; ++p) {
1421     tpwgts[p] = 1.0/nparts;
1422   }
1423   ubvec[0] = 1.05;
1424   /* Weight cells by dofs on cell by default */
1425   ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1426   if (section) {
1427     PetscInt cStart, cEnd, dof;
1428 
1429     ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
1430     for (v = cStart; v < cEnd; ++v) {
1431       ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1432       /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1433       /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1434       if (v-cStart < numVertices) vwgt[v-cStart] = PetscMax(dof, 1);
1435     }
1436   } else {
1437     for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1438   }
1439 
1440   if (nparts == 1) {
1441     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1442   } else {
1443     for (p = 0; !vtxdist[p+1] && p < size; ++p);
1444     if (vtxdist[p+1] == vtxdist[size]) {
1445       if (rank == p) {
1446         PetscStackPush("METIS_PartGraphKway");
1447         ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
1448         PetscStackPop;
1449         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1450       }
1451     } else {
1452       PetscStackPush("ParMETIS_V3_PartKway");
1453       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1454       PetscStackPop;
1455       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
1456     }
1457   }
1458   /* Convert to PetscSection+IS */
1459   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1460   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1461   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1462   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1463   for (p = 0, i = 0; p < nparts; ++p) {
1464     for (v = 0; v < nvtxs; ++v) {
1465       if (assignment[v] == p) points[i++] = v;
1466     }
1467   }
1468   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1469   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1470   ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
1471   PetscFunctionReturn(0);
1472 #else
1473   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1474 #endif
1475 }
1476 
1477 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1478 {
1479   PetscFunctionBegin;
1480   part->ops->view      = PetscPartitionerView_ParMetis;
1481   part->ops->destroy   = PetscPartitionerDestroy_ParMetis;
1482   part->ops->partition = PetscPartitionerPartition_ParMetis;
1483   PetscFunctionReturn(0);
1484 }
1485 
1486 /*MC
1487   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1488 
1489   Level: intermediate
1490 
1491 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1492 M*/
1493 
1494 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1495 {
1496   PetscPartitioner_ParMetis *p;
1497   PetscErrorCode          ierr;
1498 
1499   PetscFunctionBegin;
1500   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1501   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1502   part->data = p;
1503 
1504   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1505   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1506   PetscFunctionReturn(0);
1507 }
1508 
1509 
/* BibTeX citation for PT-Scotch; presumably registered with
   PetscCitationsRegister() when the PT-Scotch partitioner is created, as done
   for the Chaco and ParMetis citations above -- confirm at the registration
   site, which is outside this chunk. */
PetscBool PTScotchPartitionercite = PETSC_FALSE;
const char PTScotchPartitionerCitation[] =
  "@article{PTSCOTCH,\n"
  "  author  = {C. Chevalier and F. Pellegrini},\n"
  "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
  "  journal = {Parallel Computing},\n"
  "  volume  = {34},\n"
  "  number  = {6},\n"
  "  pages   = {318--331},\n"
  "  year    = {2008},\n"
  "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
  "}\n";
1522 
/* Implementation-specific context for the "ptscotch" partitioner type */
typedef struct {
  PetscInt  strategy;  /* index into PTScotchStrategyList; mapped to a SCOTCH_STRAT* flag by PTScotch_Strategy() */
  PetscReal imbalance; /* load imbalance ratio handed to the SCOTCH strategy */
} PetscPartitioner_PTScotch;
1527 
/* Human-readable names for -petscpartitioner_ptscotch_strategy; the index of each
   entry corresponds to the case handled in PTScotch_Strategy() below */
static const char *const
PTScotchStrategyList[] = {
  "DEFAULT",
  "QUALITY",
  "SPEED",
  "BALANCE",
  "SAFETY",
  "SCALABILITY",
  "RECURSIVE",
  "REMAP"
};
1539 
1540 #if defined(PETSC_HAVE_PTSCOTCH)
1541 
1542 EXTERN_C_BEGIN
1543 #include <ptscotch.h>
1544 EXTERN_C_END
1545 
1546 #define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0)
1547 
1548 static int PTScotch_Strategy(PetscInt strategy)
1549 {
1550   switch (strategy) {
1551   case  0: return SCOTCH_STRATDEFAULT;
1552   case  1: return SCOTCH_STRATQUALITY;
1553   case  2: return SCOTCH_STRATSPEED;
1554   case  3: return SCOTCH_STRATBALANCE;
1555   case  4: return SCOTCH_STRATSAFETY;
1556   case  5: return SCOTCH_STRATSCALABILITY;
1557   case  6: return SCOTCH_STRATRECURSIVE;
1558   case  7: return SCOTCH_STRATREMAP;
1559   default: return SCOTCH_STRATDEFAULT;
1560   }
1561 }
1562 
1563 
1564 static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1565                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[])
1566 {
1567   SCOTCH_Graph   grafdat;
1568   SCOTCH_Strat   stradat;
1569   SCOTCH_Num     vertnbr = n;
1570   SCOTCH_Num     edgenbr = xadj[n];
1571   SCOTCH_Num*    velotab = vtxwgt;
1572   SCOTCH_Num*    edlotab = adjwgt;
1573   SCOTCH_Num     flagval = strategy;
1574   double         kbalval = imbalance;
1575   PetscErrorCode ierr;
1576 
1577   PetscFunctionBegin;
1578   {
1579     PetscBool flg = PETSC_TRUE;
1580     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
1581     if (!flg) velotab = NULL;
1582   }
1583   ierr = SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr);
1584   ierr = SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
1585   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1586   ierr = SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
1587 #if defined(PETSC_USE_DEBUG)
1588   ierr = SCOTCH_graphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1589 #endif
1590   ierr = SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
1591   SCOTCH_stratExit(&stradat);
1592   SCOTCH_graphExit(&grafdat);
1593   PetscFunctionReturn(0);
1594 }
1595 
1596 static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1597                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[], MPI_Comm comm)
1598 {
1599   PetscMPIInt     procglbnbr;
1600   PetscMPIInt     proclocnum;
1601   SCOTCH_Arch     archdat;
1602   SCOTCH_Dgraph   grafdat;
1603   SCOTCH_Dmapping mappdat;
1604   SCOTCH_Strat    stradat;
1605   SCOTCH_Num      vertlocnbr;
1606   SCOTCH_Num      edgelocnbr;
1607   SCOTCH_Num*     veloloctab = vtxwgt;
1608   SCOTCH_Num*     edloloctab = adjwgt;
1609   SCOTCH_Num      flagval = strategy;
1610   double          kbalval = imbalance;
1611   PetscErrorCode  ierr;
1612 
1613   PetscFunctionBegin;
1614   {
1615     PetscBool flg = PETSC_TRUE;
1616     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
1617     if (!flg) veloloctab = NULL;
1618   }
1619   ierr = MPI_Comm_size(comm, &procglbnbr);CHKERRQ(ierr);
1620   ierr = MPI_Comm_rank(comm, &proclocnum);CHKERRQ(ierr);
1621   vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
1622   edgelocnbr = xadj[vertlocnbr];
1623 
1624   ierr = SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr);
1625   ierr = SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr);
1626 #if defined(PETSC_USE_DEBUG)
1627   ierr = SCOTCH_dgraphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1628 #endif
1629   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1630   ierr = SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRQ(ierr);
1631   ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
1632   ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
1633   ierr = SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr);
1634   ierr = SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr);
1635   SCOTCH_dgraphMapExit(&grafdat, &mappdat);
1636   SCOTCH_archExit(&archdat);
1637   SCOTCH_stratExit(&stradat);
1638   SCOTCH_dgraphExit(&grafdat);
1639   PetscFunctionReturn(0);
1640 }
1641 
1642 #endif /* PETSC_HAVE_PTSCOTCH */
1643 
1644 static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
1645 {
1646   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1647   PetscErrorCode             ierr;
1648 
1649   PetscFunctionBegin;
1650   ierr = PetscFree(p);CHKERRQ(ierr);
1651   PetscFunctionReturn(0);
1652 }
1653 
1654 static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer)
1655 {
1656   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1657   PetscErrorCode            ierr;
1658 
1659   PetscFunctionBegin;
1660   ierr = PetscViewerASCIIPrintf(viewer, "PTScotch Graph Partitioner:\n");CHKERRQ(ierr);
1661   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1662   ierr = PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);CHKERRQ(ierr);
1663   ierr = PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n",(double)p->imbalance);CHKERRQ(ierr);
1664   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1665   PetscFunctionReturn(0);
1666 }
1667 
1668 static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
1669 {
1670   PetscBool      iascii;
1671   PetscErrorCode ierr;
1672 
1673   PetscFunctionBegin;
1674   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1675   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1676   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1677   if (iascii) {ierr = PetscPartitionerView_PTScotch_Ascii(part, viewer);CHKERRQ(ierr);}
1678   PetscFunctionReturn(0);
1679 }
1680 
1681 static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1682 {
1683   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1684   const char *const         *slist = PTScotchStrategyList;
1685   PetscInt                  nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0]));
1686   PetscBool                 flag;
1687   PetscErrorCode            ierr;
1688 
1689   PetscFunctionBegin;
1690   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");CHKERRQ(ierr);
1691   ierr = PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);CHKERRQ(ierr);
1692   ierr = PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);CHKERRQ(ierr);
1693   ierr = PetscOptionsTail();CHKERRQ(ierr);
1694   PetscFunctionReturn(0);
1695 }
1696 
1697 static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1698 {
1699 #if defined(PETSC_HAVE_PTSCOTCH)
1700   MPI_Comm       comm       = PetscObjectComm((PetscObject)part);
1701   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1702   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1703   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1704   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1705   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1706   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1707   PetscInt       v, i, *assignment, *points;
1708   PetscMPIInt    size, rank, p;
1709   PetscErrorCode ierr;
1710 
1711   PetscFunctionBegin;
1712   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1713   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1714   ierr = PetscMalloc2(nparts+1,&vtxdist,PetscMax(nvtxs,1),&assignment);CHKERRQ(ierr);
1715 
1716   /* Calculate vertex distribution */
1717   vtxdist[0] = 0;
1718   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1719   for (p = 2; p <= size; ++p) {
1720     vtxdist[p] += vtxdist[p-1];
1721   }
1722 
1723   if (nparts == 1) {
1724     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1725   } else {
1726     PetscSection section;
1727     /* Weight cells by dofs on cell by default */
1728     ierr = PetscMalloc1(PetscMax(nvtxs,1),&vwgt);CHKERRQ(ierr);
1729     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1730     if (section) {
1731       PetscInt vStart, vEnd, dof;
1732       ierr = DMPlexGetHeightStratum(dm, 0, &vStart, &vEnd);CHKERRQ(ierr);
1733       for (v = vStart; v < vEnd; ++v) {
1734         ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1735         /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1736         /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1737         if (v-vStart < numVertices) vwgt[v-vStart] = PetscMax(dof, 1);
1738       }
1739     } else {
1740       for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1741     }
1742     {
1743       PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
1744       int                       strat = PTScotch_Strategy(pts->strategy);
1745       double                    imbal = (double)pts->imbalance;
1746 
1747       for (p = 0; !vtxdist[p+1] && p < size; ++p);
1748       if (vtxdist[p+1] == vtxdist[size]) {
1749         if (rank == p) {
1750           ierr = PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, assignment);CHKERRQ(ierr);
1751         }
1752       } else {
1753         ierr = PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, assignment, comm);CHKERRQ(ierr);
1754       }
1755     }
1756     ierr = PetscFree(vwgt);CHKERRQ(ierr);
1757   }
1758 
1759   /* Convert to PetscSection+IS */
1760   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1761   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1762   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1763   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1764   for (p = 0, i = 0; p < nparts; ++p) {
1765     for (v = 0; v < nvtxs; ++v) {
1766       if (assignment[v] == p) points[i++] = v;
1767     }
1768   }
1769   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1770   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1771 
1772   ierr = PetscFree2(vtxdist,assignment);CHKERRQ(ierr);
1773   PetscFunctionReturn(0);
1774 #else
1775   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
1776 #endif
1777 }
1778 
1779 static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
1780 {
1781   PetscFunctionBegin;
1782   part->ops->view           = PetscPartitionerView_PTScotch;
1783   part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
1784   part->ops->partition      = PetscPartitionerPartition_PTScotch;
1785   part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
1786   PetscFunctionReturn(0);
1787 }
1788 
1789 /*MC
1790   PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library
1791 
1792   Level: intermediate
1793 
1794 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1795 M*/
1796 
1797 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
1798 {
1799   PetscPartitioner_PTScotch *p;
1800   PetscErrorCode          ierr;
1801 
1802   PetscFunctionBegin;
1803   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1804   ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
1805   part->data = p;
1806 
1807   p->strategy  = 0;
1808   p->imbalance = 0.01;
1809 
1810   ierr = PetscPartitionerInitialize_PTScotch(part);CHKERRQ(ierr);
1811   ierr = PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);CHKERRQ(ierr);
1812   PetscFunctionReturn(0);
1813 }
1814 
1815 
1816 /*@
1817   DMPlexGetPartitioner - Get the mesh partitioner
1818 
1819   Not collective
1820 
1821   Input Parameter:
1822 . dm - The DM
1823 
1824   Output Parameter:
1825 . part - The PetscPartitioner
1826 
1827   Level: developer
1828 
1829   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1830 
1831 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1832 @*/
1833 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1834 {
1835   DM_Plex       *mesh = (DM_Plex *) dm->data;
1836 
1837   PetscFunctionBegin;
1838   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1839   PetscValidPointer(part, 2);
1840   *part = mesh->partitioner;
1841   PetscFunctionReturn(0);
1842 }
1843 
1844 /*@
1845   DMPlexSetPartitioner - Set the mesh partitioner
1846 
1847   logically collective on dm and part
1848 
1849   Input Parameters:
1850 + dm - The DM
1851 - part - The partitioner
1852 
1853   Level: developer
1854 
1855   Note: Any existing PetscPartitioner will be destroyed.
1856 
1857 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1858 @*/
1859 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1860 {
1861   DM_Plex       *mesh = (DM_Plex *) dm->data;
1862   PetscErrorCode ierr;
1863 
1864   PetscFunctionBegin;
1865   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1866   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1867   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1868   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1869   mesh->partitioner = part;
1870   PetscFunctionReturn(0);
1871 }
1872 
/* Recursively add to the hash table ht the tree (non-conforming mesh) relatives of
   point: the closure of its tree parent when up is PETSC_TRUE, and its tree children
   when down is PETSC_TRUE. Used to augment partition closures on meshes with trees. */
static PetscErrorCode DMPlexAddClosure_Tree(DM dm, PetscHashI ht, PetscInt point, PetscBool up, PetscBool down)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (up) {
    PetscInt parent;

    ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
    if (parent != point) { /* parent == point signals that the point has no tree parent */
      PetscInt closureSize, *closure = NULL, i;

      ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
      for (i = 0; i < closureSize; i++) {
        PetscInt cpoint = closure[2*i]; /* closure entries are (point, orientation) pairs */
        PETSC_UNUSED PetscHashIIter iter, ret;

        PetscHashIPut(ht, cpoint, ret, iter);
        /* recurse upward only; the downward sweep below handles children separately */
        ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
      }
      ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
    }
  }
  if (down) {
    PetscInt numChildren;
    const PetscInt *children;

    ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
    if (numChildren) {
      PetscInt i;

      for (i = 0; i < numChildren; i++) {
        PetscInt cpoint = children[i];
        PETSC_UNUSED PetscHashIIter iter, ret;

        PetscHashIPut(ht, cpoint, ret, iter);
        /* recurse downward only */
        ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}
1915 
1916 /*@
1917   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
1918 
1919   Input Parameters:
1920 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1922 
1923   Level: developer
1924 
1925 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1926 @*/
PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, c, closureSize;
  PetscInt       *closure = NULL;
  DM_Plex        *mesh    = (DM_Plex *)dm->data;
  /* When the mesh has a tree structure, closures must also include tree relatives */
  PetscBool       hasTree = (mesh->parentSection || mesh->childSection) ? PETSC_TRUE : PETSC_FALSE;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);

  /* For each rank value, replace its stratum by the transitive closure of its points */
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];
    PetscHashI     ht;
    PETSC_UNUSED   PetscHashIIter iter, ret;
    PetscInt       nkeys, *keys, off = 0;

    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    /* A hash table deduplicates closure points shared between stratum members;
       numPoints*16 is a heuristic preallocation */
    PetscHashICreate(ht);
    PetscHashIResize(ht, numPoints*16);
    for (p = 0; p < numPoints; ++p) {
      ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      /* closure entries are (point, orientation) pairs, hence the stride of 2 */
      for (c = 0; c < closureSize*2; c += 2) {
        PetscHashIPut(ht, closure[c], ret, iter);
        if (hasTree) {ierr = DMPlexAddClosure_Tree(dm, ht, closure[c], PETSC_TRUE, PETSC_TRUE);CHKERRQ(ierr);}
      }
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
    /* Extract the collected points, sort them, and overwrite the stratum */
    PetscHashISize(ht, nkeys);
    ierr = PetscMalloc1(nkeys, &keys);CHKERRQ(ierr);
    PetscHashIGetKeys(ht, &off, keys);
    PetscHashIDestroy(ht);
    ierr = PetscSortInt(nkeys, keys);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PETSC_COMM_SELF, nkeys, keys, PETSC_OWN_POINTER, &pointIS);CHKERRQ(ierr);
    ierr = DMLabelSetStratumIS(label, rank, pointIS);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }

  /* The closure work array is reused across iterations; return it once at the end */
  if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure);CHKERRQ(ierr);}
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1977 
1978 /*@
1979   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
1980 
1981   Input Parameters:
1982 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1984 
1985   Level: developer
1986 
1987 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1988 @*/
1989 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
1990 {
1991   IS              rankIS,   pointIS;
1992   const PetscInt *ranks,   *points;
1993   PetscInt        numRanks, numPoints, r, p, a, adjSize;
1994   PetscInt       *adj = NULL;
1995   PetscErrorCode  ierr;
1996 
1997   PetscFunctionBegin;
1998   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1999   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2000   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2001   for (r = 0; r < numRanks; ++r) {
2002     const PetscInt rank = ranks[r];
2003 
2004     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2005     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2006     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2007     for (p = 0; p < numPoints; ++p) {
2008       adjSize = PETSC_DETERMINE;
2009       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
2010       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
2011     }
2012     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2013     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2014   }
2015   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2016   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2017   ierr = PetscFree(adj);CHKERRQ(ierr);
2018   PetscFunctionReturn(0);
2019 }
2020 
2021 /*@
2022   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
2023 
2024   Input Parameters:
2025 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2027 
2028   Level: developer
2029 
2030   Note: This is required when generating multi-level overlaps to capture
2031   overlap points from non-neighbouring partitions.
2032 
2033 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2034 @*/
2035 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
2036 {
2037   MPI_Comm        comm;
2038   PetscMPIInt     rank;
2039   PetscSF         sfPoint;
2040   DMLabel         lblRoots, lblLeaves;
2041   IS              rankIS, pointIS;
2042   const PetscInt *ranks;
2043   PetscInt        numRanks, r;
2044   PetscErrorCode  ierr;
2045 
2046   PetscFunctionBegin;
2047   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2048   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2049   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
2050   /* Pull point contributions from remote leaves into local roots */
2051   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
2052   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
2053   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2054   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2055   for (r = 0; r < numRanks; ++r) {
2056     const PetscInt remoteRank = ranks[r];
2057     if (remoteRank == rank) continue;
2058     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
2059     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2060     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2061   }
2062   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2063   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2064   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
2065   /* Push point contributions from roots into remote leaves */
2066   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
2067   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
2068   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2069   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2070   for (r = 0; r < numRanks; ++r) {
2071     const PetscInt remoteRank = ranks[r];
2072     if (remoteRank == rank) continue;
2073     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
2074     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2075     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2076   }
2077   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2078   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2079   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
2080   PetscFunctionReturn(0);
2081 }
2082 
2083 /*@
2084   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
2085 
  Input Parameters:
+ dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots
2093 
2094   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
2095   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
2096 
2097   Level: developer
2098 
2099 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2100 @*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           p, n, numNeighbors, ssize, l, nleaves;
  PetscSF            sfPoint;
  PetscSFNode       *rootPoints, *leafPoints;
  PetscSection       rootSection, leafSection;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners */
  /* rootSection counts, per target rank, how many (point, owner) pairs we send */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &ssize);CHKERRQ(ierr);
  ierr = PetscMalloc1(ssize, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* If the point is a leaf in the point SF, record its actual (remote) owner;
         otherwise this rank owns the point itself */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Filter remote contributions (ovLeafPoints) into the overlapSF */
  /* Each received (point, rank) pair becomes an entry of leafLabel */
  ierr = PetscSectionGetStorageSize(leafSection, &ssize);CHKERRQ(ierr);
  for (p = 0; p < ssize; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2170 
2171 /*@
2172   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
2173 
  Input Parameters:
+ dm    - The DM
- label - DMLabel assigning ranks to remote roots
2177 
2178   Output Parameter:
2179 - sf    - The star forest communication context encapsulating the defined mapping
2180 
2181   Note: The incoming label is a receiver mapping of remote points to their parent rank.
2182 
2183   Level: developer
2184 
2185 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
2186 @*/
2187 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
2188 {
2189   PetscMPIInt     rank, size;
2190   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
2191   PetscSFNode    *remotePoints;
2192   IS              remoteRootIS;
2193   const PetscInt *remoteRoots;
2194   PetscErrorCode ierr;
2195 
2196   PetscFunctionBegin;
2197   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
2198   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
2199 
2200   for (numRemote = 0, n = 0; n < size; ++n) {
2201     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
2202     numRemote += numPoints;
2203   }
2204   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
2205   /* Put owned points first */
2206   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
2207   if (numPoints > 0) {
2208     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
2209     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2210     for (p = 0; p < numPoints; p++) {
2211       remotePoints[idx].index = remoteRoots[p];
2212       remotePoints[idx].rank = rank;
2213       idx++;
2214     }
2215     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2216     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
2217   }
2218   /* Now add remote points */
2219   for (n = 0; n < size; ++n) {
2220     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
2221     if (numPoints <= 0 || n == rank) continue;
2222     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
2223     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2224     for (p = 0; p < numPoints; p++) {
2225       remotePoints[idx].index = remoteRoots[p];
2226       remotePoints[idx].rank = n;
2227       idx++;
2228     }
2229     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2230     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
2231   }
2232   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
2233   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
2234   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
2235   PetscFunctionReturn(0);
2236 }
2237