xref: /petsc/src/dm/impls/plex/plexpartition.c (revision c717d290cf26e03997d58abd9b4e4fa89d8bcb48)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
/* Class id for PetscPartitioner objects, assigned when the DM package is registered */
PetscClassId PETSCPARTITIONER_CLASSID = 0;

/* Registry of PetscPartitioner implementations, populated by PetscPartitionerRegister() */
PetscFunctionList PetscPartitionerList              = NULL;
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
7 
8 PetscBool ChacoPartitionercite = PETSC_FALSE;
9 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
10                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
11                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
12                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
13                                "  isbn      = {0-89791-816-9},\n"
14                                "  pages     = {28},\n"
15                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
16                                "  publisher = {ACM Press},\n"
17                                "  address   = {New York},\n"
18                                "  year      = {1995}\n}\n";
19 
/* Citation flag and BibTeX entry registered when the ParMetis partitioner is used */
PetscBool ParMetisPartitionercite = PETSC_FALSE;
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";
28 
29 /*@C
30   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
31 
32   Input Parameters:
33 + dm      - The mesh DM dm
34 - height  - Height of the strata from which to construct the graph
35 
36   Output Parameter:
37 + numVertices     - Number of vertices in the graph
38 . offsets         - Point offsets in the graph
39 . adjacency       - Point connectivity in the graph
40 - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.
41 
42   The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
43   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
44   representation on the mesh.
45 
46   Level: developer
47 
48 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
49 @*/
50 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
51 {
52   PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
53   PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
54   IS             cellNumbering;
55   const PetscInt *cellNum;
56   PetscBool      useCone, useClosure;
57   PetscSection   section;
58   PetscSegBuffer adjBuffer;
59   PetscSF        sfPoint;
60   PetscMPIInt    rank;
61   PetscErrorCode ierr;
62 
63   PetscFunctionBegin;
64   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
65   ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
66   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
67   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
68   /* Build adjacency graph via a section/segbuffer */
69   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
70   ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
71   ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
72   /* Always use FVM adjacency to create partitioner graph */
73   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
74   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
75   ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
76   ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
77   ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);CHKERRQ(ierr);
78   if (globalNumbering) {
79     ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
80     *globalNumbering = cellNumbering;
81   }
82   ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
83   for (*numVertices = 0, p = pStart; p < pEnd; p++) {
84     /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
85     if (nroots > 0) {if (cellNum[p] < 0) continue;}
86     adjSize = PETSC_DETERMINE;
87     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
88     for (a = 0; a < adjSize; ++a) {
89       const PetscInt point = adj[a];
90       if (point != p && pStart <= point && point < pEnd) {
91         PetscInt *PETSC_RESTRICT pBuf;
92         ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
93         ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
94         *pBuf = point;
95       }
96     }
97     (*numVertices)++;
98   }
99   ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
100   ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
101   /* Derive CSR graph from section/segbuffer */
102   ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
103   ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
104   ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
105   for (idx = 0, p = pStart; p < pEnd; p++) {
106     if (nroots > 0) {if (cellNum[p] < 0) continue;}
107     ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
108   }
109   vOffsets[*numVertices] = size;
110   if (offsets) *offsets = vOffsets;
111   ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
112   {
113     ISLocalToGlobalMapping ltogCells;
114     PetscInt n, size, *cells_arr;
115     /* In parallel, apply a global cell numbering to the graph */
116     ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
117     ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
118     ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
119     ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
120     /* Convert to positive global cell numbers */
121     for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
122     ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
123     ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);CHKERRQ(ierr);
124     ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
125     ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
126   }
127   if (adjacency) *adjacency = graph;
128   /* Clean up */
129   ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
130   ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
131   ierr = PetscFree(adj);CHKERRQ(ierr);
132   PetscFunctionReturn(0);
133 }
134 
135 /*@C
136   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
137 
138   Collective
139 
140   Input Arguments:
141 + dm - The DMPlex
142 - cellHeight - The height of mesh points to treat as cells (default should be 0)
143 
144   Output Arguments:
145 + numVertices - The number of local vertices in the graph, or cells in the mesh.
146 . offsets     - The offset to the adjacency list for each cell
147 - adjacency   - The adjacency list for all cells
148 
149   Note: This is suitable for input to a mesh partitioner like ParMetis.
150 
151   Level: advanced
152 
153 .seealso: DMPlexCreate()
154 @*/
155 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
156 {
157   const PetscInt maxFaceCases = 30;
158   PetscInt       numFaceCases = 0;
159   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
160   PetscInt      *off, *adj;
161   PetscInt      *neighborCells = NULL;
162   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
163   PetscErrorCode ierr;
164 
165   PetscFunctionBegin;
166   /* For parallel partitioning, I think you have to communicate supports */
167   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
168   cellDim = dim - cellHeight;
169   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
170   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
171   if (cEnd - cStart == 0) {
172     if (numVertices) *numVertices = 0;
173     if (offsets)   *offsets   = NULL;
174     if (adjacency) *adjacency = NULL;
175     PetscFunctionReturn(0);
176   }
177   numCells  = cEnd - cStart;
178   faceDepth = depth - cellHeight;
179   if (dim == depth) {
180     PetscInt f, fStart, fEnd;
181 
182     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
183     /* Count neighboring cells */
184     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
185     for (f = fStart; f < fEnd; ++f) {
186       const PetscInt *support;
187       PetscInt        supportSize;
188       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
189       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
190       if (supportSize == 2) {
191         PetscInt numChildren;
192 
193         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
194         if (!numChildren) {
195           ++off[support[0]-cStart+1];
196           ++off[support[1]-cStart+1];
197         }
198       }
199     }
200     /* Prefix sum */
201     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
202     if (adjacency) {
203       PetscInt *tmp;
204 
205       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
206       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
207       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
208       /* Get neighboring cells */
209       for (f = fStart; f < fEnd; ++f) {
210         const PetscInt *support;
211         PetscInt        supportSize;
212         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
213         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
214         if (supportSize == 2) {
215           PetscInt numChildren;
216 
217           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
218           if (!numChildren) {
219             adj[tmp[support[0]-cStart]++] = support[1];
220             adj[tmp[support[1]-cStart]++] = support[0];
221           }
222         }
223       }
224 #if defined(PETSC_USE_DEBUG)
225       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
226 #endif
227       ierr = PetscFree(tmp);CHKERRQ(ierr);
228     }
229     if (numVertices) *numVertices = numCells;
230     if (offsets)   *offsets   = off;
231     if (adjacency) *adjacency = adj;
232     PetscFunctionReturn(0);
233   }
234   /* Setup face recognition */
235   if (faceDepth == 1) {
236     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
237 
238     for (c = cStart; c < cEnd; ++c) {
239       PetscInt corners;
240 
241       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
242       if (!cornersSeen[corners]) {
243         PetscInt nFV;
244 
245         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
246         cornersSeen[corners] = 1;
247 
248         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
249 
250         numFaceVertices[numFaceCases++] = nFV;
251       }
252     }
253   }
254   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
255   /* Count neighboring cells */
256   for (cell = cStart; cell < cEnd; ++cell) {
257     PetscInt numNeighbors = PETSC_DETERMINE, n;
258 
259     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
260     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
261     for (n = 0; n < numNeighbors; ++n) {
262       PetscInt        cellPair[2];
263       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
264       PetscInt        meetSize = 0;
265       const PetscInt *meet    = NULL;
266 
267       cellPair[0] = cell; cellPair[1] = neighborCells[n];
268       if (cellPair[0] == cellPair[1]) continue;
269       if (!found) {
270         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
271         if (meetSize) {
272           PetscInt f;
273 
274           for (f = 0; f < numFaceCases; ++f) {
275             if (numFaceVertices[f] == meetSize) {
276               found = PETSC_TRUE;
277               break;
278             }
279           }
280         }
281         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
282       }
283       if (found) ++off[cell-cStart+1];
284     }
285   }
286   /* Prefix sum */
287   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
288 
289   if (adjacency) {
290     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
291     /* Get neighboring cells */
292     for (cell = cStart; cell < cEnd; ++cell) {
293       PetscInt numNeighbors = PETSC_DETERMINE, n;
294       PetscInt cellOffset   = 0;
295 
296       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
297       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
298       for (n = 0; n < numNeighbors; ++n) {
299         PetscInt        cellPair[2];
300         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
301         PetscInt        meetSize = 0;
302         const PetscInt *meet    = NULL;
303 
304         cellPair[0] = cell; cellPair[1] = neighborCells[n];
305         if (cellPair[0] == cellPair[1]) continue;
306         if (!found) {
307           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
308           if (meetSize) {
309             PetscInt f;
310 
311             for (f = 0; f < numFaceCases; ++f) {
312               if (numFaceVertices[f] == meetSize) {
313                 found = PETSC_TRUE;
314                 break;
315               }
316             }
317           }
318           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
319         }
320         if (found) {
321           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
322           ++cellOffset;
323         }
324       }
325     }
326   }
327   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
328   if (numVertices) *numVertices = numCells;
329   if (offsets)   *offsets   = off;
330   if (adjacency) *adjacency = adj;
331   PetscFunctionReturn(0);
332 }
333 
/*@C
  PetscPartitionerRegister - Adds a new PetscPartitioner implementation

  Not Collective

  Input Parameters:
+ name        - The name of a new user-defined creation routine
- create_func - The creation routine itself

  Notes:
  PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners

  Sample usage:
.vb
    PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
.ve

  Then, your PetscPartitioner type can be chosen with the procedural interface via
.vb
    PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
    PetscPartitionerSetType(PetscPartitioner, "my_part");
.ve
   or at runtime via the option
.vb
    -petscpartitioner_type my_part
.ve

  Level: advanced

.keywords: PetscPartitioner, register
.seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()

@*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Insert the creation routine into the global registry consulted by PetscPartitionerSetType() */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
375 
376 /*@C
377   PetscPartitionerSetType - Builds a particular PetscPartitioner
378 
379   Collective on PetscPartitioner
380 
381   Input Parameters:
382 + part - The PetscPartitioner object
383 - name - The kind of partitioner
384 
385   Options Database Key:
386 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
387 
388   Level: intermediate
389 
390 .keywords: PetscPartitioner, set, type
391 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
392 @*/
393 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
394 {
395   PetscErrorCode (*r)(PetscPartitioner);
396   PetscBool      match;
397   PetscErrorCode ierr;
398 
399   PetscFunctionBegin;
400   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
401   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
402   if (match) PetscFunctionReturn(0);
403 
404   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
405   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
406   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
407 
408   if (part->ops->destroy) {
409     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
410     part->ops->destroy = NULL;
411   }
412   ierr = (*r)(part);CHKERRQ(ierr);
413   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
414   PetscFunctionReturn(0);
415 }
416 
/*@C
  PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.

  Not Collective

  Input Parameter:
. part - The PetscPartitioner

  Output Parameter:
. name - The PetscPartitioner type name

  Level: intermediate

.keywords: PetscPartitioner, get, type, name
.seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  /* Returns a borrowed reference to the type string; may be NULL if no type was set */
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
444 
/*@C
  PetscPartitionerView - Views a PetscPartitioner

  Collective on PetscPartitioner

  Input Parameters:
+ part - the PetscPartitioner object to view
- v    - the viewer

  Level: developer

.seealso: PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Default to stdout on the object's communicator when no viewer is given */
  if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
  if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
468 
469 PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
470 {
471   const char    *defaultType;
472   char           name[256];
473   PetscBool      flg;
474   PetscErrorCode ierr;
475 
476   PetscFunctionBegin;
477   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
478   if (!((PetscObject) part)->type_name)
479 #if defined(PETSC_HAVE_CHACO)
480     defaultType = PETSCPARTITIONERCHACO;
481 #elif defined(PETSC_HAVE_PARMETIS)
482     defaultType = PETSCPARTITIONERPARMETIS;
483 #else
484     defaultType = PETSCPARTITIONERSIMPLE;
485 #endif
486   else
487     defaultType = ((PetscObject) part)->type_name;
488   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
489 
490   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
491   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);CHKERRQ(ierr);
492   if (flg) {
493     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
494   } else if (!((PetscObject) part)->type_name) {
495     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
496   }
497   ierr = PetscOptionsEnd();CHKERRQ(ierr);
498   PetscFunctionReturn(0);
499 }
500 
/*@
  PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to set options for

  Level: developer

.seealso: PetscPartitionerView()
@*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr = PetscPartitionerSetTypeFromOptions_Internal(part);CHKERRQ(ierr);

  /* NOTE: PetscObjectOptionsBegin() declares the PetscOptionsObject used below */
  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  if (part->ops->setfromoptions) {ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);}
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
529 
/*@C
  PetscPartitionerSetUp - Construct data structures for the PetscPartitioner

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to setup

  Level: developer

.seealso: PetscPartitionerView(), PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Delegate to the implementation; a missing setup hook is not an error */
  if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
551 
552 /*@
553   PetscPartitionerDestroy - Destroys a PetscPartitioner object
554 
555   Collective on PetscPartitioner
556 
557   Input Parameter:
558 . part - the PetscPartitioner object to destroy
559 
560   Level: developer
561 
562 .seealso: PetscPartitionerView()
563 @*/
564 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
565 {
566   PetscErrorCode ierr;
567 
568   PetscFunctionBegin;
569   if (!*part) PetscFunctionReturn(0);
570   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
571 
572   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
573   ((PetscObject) (*part))->refct = 0;
574 
575   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
576   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
577   PetscFunctionReturn(0);
578 }
579 
/*@
  PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the PetscPartitioner object

  Output Parameter:
. part - The PetscPartitioner object

  Level: beginner

.seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
@*/
PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
{
  PetscPartitioner p;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidPointer(part, 2);
  *part = NULL;
  /* Ensure the class id and registry exist before creating the header */
  ierr = DMInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);

  *part = p;
  PetscFunctionReturn(0);
}
610 
611 /*@
612   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
613 
614   Collective on DM
615 
616   Input Parameters:
617 + part    - The PetscPartitioner
618 - dm      - The mesh DM
619 
620   Output Parameters:
621 + partSection     - The PetscSection giving the division of points by partition
622 - partition       - The list of points by partition
623 
624   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
625 
626   Level: developer
627 
628 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
629 @*/
630 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
631 {
632   PetscMPIInt    size;
633   PetscErrorCode ierr;
634 
635   PetscFunctionBegin;
636   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
637   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
638   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
639   PetscValidPointer(partition, 5);
640   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
641   if (size == 1) {
642     PetscInt *points;
643     PetscInt  cStart, cEnd, c;
644 
645     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
646     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
647     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
648     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
649     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
650     for (c = cStart; c < cEnd; ++c) points[c] = c;
651     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
652     PetscFunctionReturn(0);
653   }
654   if (part->height == 0) {
655     PetscInt numVertices;
656     PetscInt *start     = NULL;
657     PetscInt *adjacency = NULL;
658     IS       globalNumbering;
659 
660     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
661     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
662     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
663     ierr = PetscFree(start);CHKERRQ(ierr);
664     ierr = PetscFree(adjacency);CHKERRQ(ierr);
665     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
666       const PetscInt *globalNum;
667       const PetscInt *partIdx;
668       PetscInt *map, cStart, cEnd;
669       PetscInt *adjusted, i, localSize, offset;
670       IS    newPartition;
671 
672       ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
673       ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
674       ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
675       ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
676       ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
677       ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
678       for (i = cStart, offset = 0; i < cEnd; i++) {
679         if (globalNum[i - cStart] >= 0) map[offset++] = i;
680       }
681       for (i = 0; i < localSize; i++) {
682         adjusted[i] = map[partIdx[i]];
683       }
684       ierr = PetscFree(map);CHKERRQ(ierr);
685       ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
686       ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
687       ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
688       ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
689       ierr = ISDestroy(partition);CHKERRQ(ierr);
690       *partition = newPartition;
691     }
692   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
693   PetscFunctionReturn(0);
694 
695 }
696 
697 static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
698 {
699   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
700   PetscErrorCode          ierr;
701 
702   PetscFunctionBegin;
703   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
704   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
705   ierr = PetscFree(p);CHKERRQ(ierr);
706   PetscFunctionReturn(0);
707 }
708 
709 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
710 {
711   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
712   PetscViewerFormat       format;
713   PetscErrorCode          ierr;
714 
715   PetscFunctionBegin;
716   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
717   ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
718   if (p->random) {
719     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
720     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
721     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
722   }
723   PetscFunctionReturn(0);
724 }
725 
/* Dispatch viewing of a shell partitioner; only ASCII viewers are supported, others are ignored */
static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
  if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
738 
/* Options handler for the shell partitioner; runs inside the PetscObjectOptionsBegin/End
   scope of PetscPartitionerSetFromOptions(), hence the PetscOptionsHead/Tail pair */
static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitionerShell Options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
750 
751 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
752 {
753   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
754   PetscInt                np;
755   PetscErrorCode          ierr;
756 
757   PetscFunctionBegin;
758   if (p->random) {
759     PetscRandom r;
760     PetscInt   *sizes, *points, v;
761 
762     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
763     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr);
764     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
765     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
766     for (v = 0; v < numVertices; ++v) {
767       PetscReal val;
768       PetscInt  part;
769 
770       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
771       part = PetscFloorReal(val);
772       points[v] = part;
773       ++sizes[part];
774     }
775     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
776     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
777     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
778   }
779   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
780   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
781   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
782   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
783   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
784   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
785   *partition = p->partition;
786   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
787   PetscFunctionReturn(0);
788 }
789 
790 static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
791 {
792   PetscFunctionBegin;
793   part->ops->view           = PetscPartitionerView_Shell;
794   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
795   part->ops->destroy        = PetscPartitionerDestroy_Shell;
796   part->ops->partition      = PetscPartitionerPartition_Shell;
797   PetscFunctionReturn(0);
798 }
799 
800 /*MC
801   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
802 
803   Level: intermediate
804 
805 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
806 M*/
807 
808 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
809 {
810   PetscPartitioner_Shell *p;
811   PetscErrorCode          ierr;
812 
813   PetscFunctionBegin;
814   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
815   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
816   part->data = p;
817 
818   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
819   p->random = PETSC_FALSE;
820   PetscFunctionReturn(0);
821 }
822 
823 /*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
825 
826   Collective on Part
827 
828   Input Parameters:
829 + part     - The PetscPartitioner
830 . size - The number of partitions
831 . sizes    - array of size size (or NULL) providing the number of points in each partition
832 - points   - array of size sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)
833 
834   Level: developer
835 
836   Notes:
837 
838     It is safe to free the sizes and points arrays after use in this routine.
839 
840 .seealso DMPlexDistribute(), PetscPartitionerCreate()
841 @*/
842 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
843 {
844   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
845   PetscInt                proc, numPoints;
846   PetscErrorCode          ierr;
847 
848   PetscFunctionBegin;
849   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
850   if (sizes)  {PetscValidPointer(sizes, 3);}
851   if (points) {PetscValidPointer(points, 4);}
852   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
853   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
854   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
855   ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
856   if (sizes) {
857     for (proc = 0; proc < size; ++proc) {
858       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
859     }
860   }
861   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
862   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
863   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
864   PetscFunctionReturn(0);
865 }
866 
867 /*@
868   PetscPartitionerShellSetRandom - Set the flag to use a random partition
869 
870   Collective on Part
871 
872   Input Parameters:
873 + part   - The PetscPartitioner
874 - random - The flag to use a random partition
875 
876   Level: intermediate
877 
878 .seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
879 @*/
880 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
881 {
882   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
883 
884   PetscFunctionBegin;
885   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
886   p->random = random;
887   PetscFunctionReturn(0);
888 }
889 
890 /*@
891   PetscPartitionerShellGetRandom - get the flag to use a random partition
892 
893   Collective on Part
894 
895   Input Parameter:
896 . part   - The PetscPartitioner
897 
  Output Parameter:
899 . random - The flag to use a random partition
900 
901   Level: intermediate
902 
903 .seealso PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
904 @*/
905 PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
906 {
907   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
908 
909   PetscFunctionBegin;
910   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
911   PetscValidPointer(random, 2);
912   *random = p->random;
913   PetscFunctionReturn(0);
914 }
915 
916 static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
917 {
918   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
919   PetscErrorCode          ierr;
920 
921   PetscFunctionBegin;
922   ierr = PetscFree(p);CHKERRQ(ierr);
923   PetscFunctionReturn(0);
924 }
925 
926 static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
927 {
928   PetscViewerFormat format;
929   PetscErrorCode    ierr;
930 
931   PetscFunctionBegin;
932   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
933   ierr = PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");CHKERRQ(ierr);
934   PetscFunctionReturn(0);
935 }
936 
937 static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
938 {
939   PetscBool      iascii;
940   PetscErrorCode ierr;
941 
942   PetscFunctionBegin;
943   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
944   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
945   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
946   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
947   PetscFunctionReturn(0);
948 }
949 
/* Block-partition the vertices evenly across nparts partitions, ignoring the
   graph connectivity (start/adjacency are unused). The local contribution to
   each partition is the overlap of this rank's contiguous global vertex range
   with the partition's contiguous global range. */
static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)dm);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  /* Local vertices are kept in order; the section alone encodes the split */
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial: the first (numVertices % nparts) partitions get one extra vertex */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  }
  else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Build the global prefix-sum of vertex counts to find this rank's range */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      PetscInt rem = nvGlobal % nparts;
      /* Partitions 0..rem-1 hold pBig vertices; the rest hold pSmall */
      PetscInt pSmall = nvGlobal/nparts;
      PetscInt pBig = nvGlobal/nparts + 1;


      if (rem) {
        /* Locate the partitions containing my first and last global vertices,
           accounting for the rem "big" partitions that come first */
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        }
        else {
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        }
        else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      }
      else {
        /* Uniform partition sizes: direct division suffices */
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      for (np = firstPart; np <= lastPart; np++) {
        /* Global range of partition np, then clipped to my local range */
        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1023 
1024 static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1025 {
1026   PetscFunctionBegin;
1027   part->ops->view      = PetscPartitionerView_Simple;
1028   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1029   part->ops->partition = PetscPartitionerPartition_Simple;
1030   PetscFunctionReturn(0);
1031 }
1032 
1033 /*MC
1034   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1035 
1036   Level: intermediate
1037 
1038 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1039 M*/
1040 
1041 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1042 {
1043   PetscPartitioner_Simple *p;
1044   PetscErrorCode           ierr;
1045 
1046   PetscFunctionBegin;
1047   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1048   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1049   part->data = p;
1050 
1051   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1052   PetscFunctionReturn(0);
1053 }
1054 
1055 static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1056 {
1057   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1058   PetscErrorCode          ierr;
1059 
1060   PetscFunctionBegin;
1061   ierr = PetscFree(p);CHKERRQ(ierr);
1062   PetscFunctionReturn(0);
1063 }
1064 
1065 static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
1066 {
1067   PetscViewerFormat format;
1068   PetscErrorCode    ierr;
1069 
1070   PetscFunctionBegin;
1071   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1072   ierr = PetscViewerASCIIPrintf(viewer, "Gather Graph Partitioner:\n");CHKERRQ(ierr);
1073   PetscFunctionReturn(0);
1074 }
1075 
1076 static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1077 {
1078   PetscBool      iascii;
1079   PetscErrorCode ierr;
1080 
1081   PetscFunctionBegin;
1082   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1083   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1084   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1085   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1086   PetscFunctionReturn(0);
1087 }
1088 
1089 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1090 {
1091   PetscInt       np;
1092   PetscErrorCode ierr;
1093 
1094   PetscFunctionBegin;
1095   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1096   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1097   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1098   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1099   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1100   PetscFunctionReturn(0);
1101 }
1102 
1103 static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1104 {
1105   PetscFunctionBegin;
1106   part->ops->view      = PetscPartitionerView_Gather;
1107   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1108   part->ops->partition = PetscPartitionerPartition_Gather;
1109   PetscFunctionReturn(0);
1110 }
1111 
1112 /*MC
1113   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1114 
1115   Level: intermediate
1116 
1117 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1118 M*/
1119 
1120 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1121 {
1122   PetscPartitioner_Gather *p;
1123   PetscErrorCode           ierr;
1124 
1125   PetscFunctionBegin;
1126   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1127   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1128   part->data = p;
1129 
1130   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1131   PetscFunctionReturn(0);
1132 }
1133 
1134 
1135 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1136 {
1137   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1138   PetscErrorCode          ierr;
1139 
1140   PetscFunctionBegin;
1141   ierr = PetscFree(p);CHKERRQ(ierr);
1142   PetscFunctionReturn(0);
1143 }
1144 
1145 static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1146 {
1147   PetscViewerFormat format;
1148   PetscErrorCode    ierr;
1149 
1150   PetscFunctionBegin;
1151   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1152   ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
1153   PetscFunctionReturn(0);
1154 }
1155 
1156 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1157 {
1158   PetscBool      iascii;
1159   PetscErrorCode ierr;
1160 
1161   PetscFunctionBegin;
1162   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1163   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1164   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1165   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1166   PetscFunctionReturn(0);
1167 }
1168 
1169 #if defined(PETSC_HAVE_CHACO)
1170 #if defined(PETSC_HAVE_UNISTD_H)
1171 #include <unistd.h>
1172 #endif
1173 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1174 #include <chaco.h>
1175 #else
1176 /* Older versions of Chaco do not have an include file */
1177 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1178                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1179                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1180                        int mesh_dims[3], double *goal, int global_method, int local_method,
1181                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1182 #endif
1183 extern int FREE_GRAPH;
1184 #endif
1185 
/* Partition the local graph using the serial Chaco library.
   The CSR arrays (start/adjacency) are shifted to 1-based numbering in place
   for Chaco and restored to 0-based before returning. Chaco's stdout chatter
   is captured through a pipe and reported only on error. */
static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int           *assignment;              /* Output partition */
#else
  short int     *assignment;              /* Output partition */
#endif
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  /* Empty local graph: produce an empty section and IS without calling Chaco */
  if (!numVertices) {
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco expects 1-based vertex numbering; shift in place, undone at the end */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    /* Restore stdout and recover Chaco's captured output for error reporting */
    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* Group vertex numbers by partition: partition 0 first, then 1, ... */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift applied above */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1293 
1294 static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1295 {
1296   PetscFunctionBegin;
1297   part->ops->view      = PetscPartitionerView_Chaco;
1298   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1299   part->ops->partition = PetscPartitionerPartition_Chaco;
1300   PetscFunctionReturn(0);
1301 }
1302 
1303 /*MC
1304   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1305 
1306   Level: intermediate
1307 
1308 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1309 M*/
1310 
1311 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1312 {
1313   PetscPartitioner_Chaco *p;
1314   PetscErrorCode          ierr;
1315 
1316   PetscFunctionBegin;
1317   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1318   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1319   part->data = p;
1320 
1321   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1322   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1323   PetscFunctionReturn(0);
1324 }
1325 
1326 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1327 {
1328   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1329   PetscErrorCode             ierr;
1330 
1331   PetscFunctionBegin;
1332   ierr = PetscFree(p);CHKERRQ(ierr);
1333   PetscFunctionReturn(0);
1334 }
1335 
1336 static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1337 {
1338   PetscViewerFormat format;
1339   PetscErrorCode    ierr;
1340 
1341   PetscFunctionBegin;
1342   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1343   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
1344   PetscFunctionReturn(0);
1345 }
1346 
1347 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1348 {
1349   PetscBool      iascii;
1350   PetscErrorCode ierr;
1351 
1352   PetscFunctionBegin;
1353   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1354   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1355   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1356   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1357   PetscFunctionReturn(0);
1358 }
1359 
1360 #if defined(PETSC_HAVE_PARMETIS)
1361 #include <parmetis.h>
1362 #endif
1363 
1364 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1365 {
1366 #if defined(PETSC_HAVE_PARMETIS)
1367   MPI_Comm       comm;
1368   PetscSection   section;
1369   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1370   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1371   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1372   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1373   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1374   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1375   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1376   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1377   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1378   PetscReal     *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1379   PetscReal     *ubvec;                    /* The balance intolerance for vertex weights */
1380   PetscInt       options[5];               /* Options */
1381   /* Outputs */
1382   PetscInt       edgeCut;                  /* The number of edges cut by the partition */
1383   PetscInt      *assignment, *points;
1384   PetscMPIInt    rank, p, v, i;
1385   PetscErrorCode ierr;
1386 
1387   PetscFunctionBegin;
1388   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1389   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1390   options[0] = 0; /* Use all defaults */
1391   /* Calculate vertex distribution */
1392   ierr = PetscMalloc5(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
1393   vtxdist[0] = 0;
1394   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1395   for (p = 2; p <= nparts; ++p) {
1396     vtxdist[p] += vtxdist[p-1];
1397   }
1398   /* Calculate weights */
1399   for (p = 0; p < nparts; ++p) {
1400     tpwgts[p] = 1.0/nparts;
1401   }
1402   ubvec[0] = 1.05;
1403   /* Weight cells by dofs on cell by default */
1404   ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1405   if (section) {
1406     PetscInt cStart, cEnd, dof;
1407 
1408     ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
1409     for (v = cStart; v < cEnd; ++v) {
1410       ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1411       vwgt[v-cStart] = PetscMax(dof, 1);
1412     }
1413   } else {
1414     for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1415   }
1416 
1417   if (nparts == 1) {
1418     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1419   } else {
1420     if (vtxdist[1] == vtxdist[nparts]) {
1421       if (!rank) {
1422         PetscStackPush("METIS_PartGraphKway");
1423         ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
1424         PetscStackPop;
1425         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1426       }
1427     } else {
1428       PetscStackPush("ParMETIS_V3_PartKway");
1429       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1430       PetscStackPop;
1431       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
1432     }
1433   }
1434   /* Convert to PetscSection+IS */
1435   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1436   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1437   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1438   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1439   for (p = 0, i = 0; p < nparts; ++p) {
1440     for (v = 0; v < nvtxs; ++v) {
1441       if (assignment[v] == p) points[i++] = v;
1442     }
1443   }
1444   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1445   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1446   ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
1447   PetscFunctionReturn(0);
1448 #else
1449   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1450 #endif
1451 }
1452 
1453 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1454 {
1455   PetscFunctionBegin;
1456   part->ops->view      = PetscPartitionerView_ParMetis;
1457   part->ops->destroy   = PetscPartitionerDestroy_ParMetis;
1458   part->ops->partition = PetscPartitionerPartition_ParMetis;
1459   PetscFunctionReturn(0);
1460 }
1461 
1462 /*MC
1463   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1464 
1465   Level: intermediate
1466 
1467 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1468 M*/
1469 
1470 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1471 {
1472   PetscPartitioner_ParMetis *p;
1473   PetscErrorCode          ierr;
1474 
1475   PetscFunctionBegin;
1476   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1477   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1478   part->data = p;
1479 
1480   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1481   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1482   PetscFunctionReturn(0);
1483 }
1484 
1485 /*@
1486   DMPlexGetPartitioner - Get the mesh partitioner
1487 
1488   Not collective
1489 
1490   Input Parameter:
1491 . dm - The DM
1492 
1493   Output Parameter:
1494 . part - The PetscPartitioner
1495 
1496   Level: developer
1497 
1498   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1499 
1500 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1501 @*/
1502 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1503 {
1504   DM_Plex       *mesh = (DM_Plex *) dm->data;
1505   PetscErrorCode ierr;
1506 
1507   PetscFunctionBegin;
1508   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1509   PetscValidPointer(part, 2);
1510   ierr = PetscPartitionerSetTypeFromOptions_Internal(mesh->partitioner);CHKERRQ(ierr);
1511   *part = mesh->partitioner;
1512   PetscFunctionReturn(0);
1513 }
1514 
1515 /*@
1516   DMPlexSetPartitioner - Set the mesh partitioner
1517 
1518   logically collective on dm and part
1519 
1520   Input Parameters:
1521 + dm - The DM
1522 - part - The partitioner
1523 
1524   Level: developer
1525 
1526   Note: Any existing PetscPartitioner will be destroyed.
1527 
1528 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1529 @*/
1530 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1531 {
1532   DM_Plex       *mesh = (DM_Plex *) dm->data;
1533   PetscErrorCode ierr;
1534 
1535   PetscFunctionBegin;
1536   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1537   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1538   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1539   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1540   mesh->partitioner = part;
1541   PetscFunctionReturn(0);
1542 }
1543 
/* Recursively label relatives of 'point' in the DM's reference tree with 'rank'.
   When 'up' is true, the closure of the point's tree parent is labeled and the
   recursion continues upward only; when 'down' is true, the point's tree
   children are labeled and the recursion continues downward only. */
static PetscErrorCode DMPlexPartitionLabelClosure_Tree(DM dm, DMLabel label, PetscInt rank, PetscInt point, PetscBool up, PetscBool down)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (up) {
    PetscInt parent;

    ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
    /* A point is its own parent when it has no parent in the tree */
    if (parent != point) {
      PetscInt closureSize, *closure = NULL, i;

      ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
      for (i = 0; i < closureSize; i++) {
        PetscInt cpoint = closure[2*i]; /* closure is (point, orientation) pairs */

        ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
        /* Continue upward only, to avoid revisiting this subtree */
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
      }
      ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
    }
  }
  if (down) {
    PetscInt numChildren;
    const PetscInt *children;

    ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
    if (numChildren) {
      PetscInt i;

      for (i = 0; i < numChildren; i++) {
        PetscInt cpoint = children[i];

        ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
        /* Continue downward only, mirroring the upward case */
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}
1584 
1585 /*@
1586   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
1587 
1588   Input Parameters:
1589 + dm     - The DM
1590 - label  - DMLabel assinging ranks to remote roots
1591 
1592   Level: developer
1593 
1594 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1595 @*/
1596 PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
1597 {
1598   IS              rankIS,   pointIS;
1599   const PetscInt *ranks,   *points;
1600   PetscInt        numRanks, numPoints, r, p, c, closureSize;
1601   PetscInt       *closure = NULL;
1602   PetscErrorCode  ierr;
1603 
1604   PetscFunctionBegin;
1605   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1606   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1607   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1608   for (r = 0; r < numRanks; ++r) {
1609     const PetscInt rank = ranks[r];
1610 
1611     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1612     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1613     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1614     for (p = 0; p < numPoints; ++p) {
1615       ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
1616       for (c = 0; c < closureSize*2; c += 2) {
1617         ierr = DMLabelSetValue(label, closure[c], rank);CHKERRQ(ierr);
1618         ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,closure[c],PETSC_TRUE,PETSC_TRUE);CHKERRQ(ierr);
1619       }
1620     }
1621     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1622     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1623   }
1624   if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);}
1625   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1626   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1627   PetscFunctionReturn(0);
1628 }
1629 
1630 /*@
1631   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
1632 
1633   Input Parameters:
1634 + dm     - The DM
1635 - label  - DMLabel assinging ranks to remote roots
1636 
1637   Level: developer
1638 
1639 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1640 @*/
1641 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
1642 {
1643   IS              rankIS,   pointIS;
1644   const PetscInt *ranks,   *points;
1645   PetscInt        numRanks, numPoints, r, p, a, adjSize;
1646   PetscInt       *adj = NULL;
1647   PetscErrorCode  ierr;
1648 
1649   PetscFunctionBegin;
1650   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1651   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1652   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1653   for (r = 0; r < numRanks; ++r) {
1654     const PetscInt rank = ranks[r];
1655 
1656     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1657     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1658     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1659     for (p = 0; p < numPoints; ++p) {
1660       adjSize = PETSC_DETERMINE;
1661       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
1662       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
1663     }
1664     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1665     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1666   }
1667   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1668   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1669   ierr = PetscFree(adj);CHKERRQ(ierr);
1670   PetscFunctionReturn(0);
1671 }
1672 
1673 /*@
1674   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
1675 
1676   Input Parameters:
1677 + dm     - The DM
1678 - label  - DMLabel assinging ranks to remote roots
1679 
1680   Level: developer
1681 
1682   Note: This is required when generating multi-level overlaps to capture
1683   overlap points from non-neighbouring partitions.
1684 
1685 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1686 @*/
1687 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
1688 {
1689   MPI_Comm        comm;
1690   PetscMPIInt     rank;
1691   PetscSF         sfPoint;
1692   DMLabel         lblRoots, lblLeaves;
1693   IS              rankIS, pointIS;
1694   const PetscInt *ranks;
1695   PetscInt        numRanks, r;
1696   PetscErrorCode  ierr;
1697 
1698   PetscFunctionBegin;
1699   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1700   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1701   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1702   /* Pull point contributions from remote leaves into local roots */
1703   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
1704   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
1705   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1706   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1707   for (r = 0; r < numRanks; ++r) {
1708     const PetscInt remoteRank = ranks[r];
1709     if (remoteRank == rank) continue;
1710     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
1711     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1712     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1713   }
1714   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1715   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1716   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
1717   /* Push point contributions from roots into remote leaves */
1718   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
1719   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
1720   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1721   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1722   for (r = 0; r < numRanks; ++r) {
1723     const PetscInt remoteRank = ranks[r];
1724     if (remoteRank == rank) continue;
1725     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
1726     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1727     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1728   }
1729   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1730   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1731   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
1732   PetscFunctionReturn(0);
1733 }
1734 
1735 /*@
1736   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
1737 
1738   Input Parameters:
1739 + dm        - The DM
1740 . rootLabel - DMLabel assinging ranks to local roots
1741 . processSF - A star forest mapping into the local index on each remote rank
1742 
1743   Output Parameter:
1744 - leafLabel - DMLabel assinging ranks to remote roots
1745 
1746   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
1747   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
1748 
1749   Level: developer
1750 
1751 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1752 @*/
1753 PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
1754 {
1755   MPI_Comm           comm;
1756   PetscMPIInt        rank, size;
1757   PetscInt           p, n, numNeighbors, ssize, l, nleaves;
1758   PetscSF            sfPoint;
1759   PetscSFNode       *rootPoints, *leafPoints;
1760   PetscSection       rootSection, leafSection;
1761   const PetscSFNode *remote;
1762   const PetscInt    *local, *neighbors;
1763   IS                 valueIS;
1764   PetscErrorCode     ierr;
1765 
1766   PetscFunctionBegin;
1767   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1768   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1769   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1770   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1771 
1772   /* Convert to (point, rank) and use actual owners */
1773   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
1774   ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
1775   ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
1776   ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
1777   ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
1778   for (n = 0; n < numNeighbors; ++n) {
1779     PetscInt numPoints;
1780 
1781     ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
1782     ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
1783   }
1784   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
1785   ierr = PetscSectionGetStorageSize(rootSection, &ssize);CHKERRQ(ierr);
1786   ierr = PetscMalloc1(ssize, &rootPoints);CHKERRQ(ierr);
1787   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
1788   for (n = 0; n < numNeighbors; ++n) {
1789     IS              pointIS;
1790     const PetscInt *points;
1791     PetscInt        off, numPoints, p;
1792 
1793     ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
1794     ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
1795     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1796     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1797     for (p = 0; p < numPoints; ++p) {
1798       if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
1799       else       {l = -1;}
1800       if (l >= 0) {rootPoints[off+p] = remote[l];}
1801       else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
1802     }
1803     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1804     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1805   }
1806   ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
1807   ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
1808   /* Communicate overlap */
1809   ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
1810   ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
1811   /* Filter remote contributions (ovLeafPoints) into the overlapSF */
1812   ierr = PetscSectionGetStorageSize(leafSection, &ssize);CHKERRQ(ierr);
1813   for (p = 0; p < ssize; p++) {
1814     ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
1815   }
1816   ierr = PetscFree(rootPoints);CHKERRQ(ierr);
1817   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
1818   ierr = PetscFree(leafPoints);CHKERRQ(ierr);
1819   ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
1820   PetscFunctionReturn(0);
1821 }
1822 
1823 /*@
1824   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
1825 
1826   Input Parameters:
1827 + dm    - The DM
1828 . label - DMLabel assinging ranks to remote roots
1829 
1830   Output Parameter:
1831 - sf    - The star forest communication context encapsulating the defined mapping
1832 
1833   Note: The incoming label is a receiver mapping of remote points to their parent rank.
1834 
1835   Level: developer
1836 
1837 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
1838 @*/
1839 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
1840 {
1841   PetscMPIInt     rank, size;
1842   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
1843   PetscSFNode    *remotePoints;
1844   IS              remoteRootIS;
1845   const PetscInt *remoteRoots;
1846   PetscErrorCode ierr;
1847 
1848   PetscFunctionBegin;
1849   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1850   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
1851 
1852   for (numRemote = 0, n = 0; n < size; ++n) {
1853     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1854     numRemote += numPoints;
1855   }
1856   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
1857   /* Put owned points first */
1858   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
1859   if (numPoints > 0) {
1860     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
1861     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1862     for (p = 0; p < numPoints; p++) {
1863       remotePoints[idx].index = remoteRoots[p];
1864       remotePoints[idx].rank = rank;
1865       idx++;
1866     }
1867     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1868     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1869   }
1870   /* Now add remote points */
1871   for (n = 0; n < size; ++n) {
1872     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1873     if (numPoints <= 0 || n == rank) continue;
1874     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
1875     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1876     for (p = 0; p < numPoints; p++) {
1877       remotePoints[idx].index = remoteRoots[p];
1878       remotePoints[idx].rank = n;
1879       idx++;
1880     }
1881     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1882     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1883   }
1884   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
1885   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1886   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1887   PetscFunctionReturn(0);
1888 }
1889