xref: /petsc/src/dm/impls/plex/plexpartition.c (revision de50f1ca5680eae1187171521708d42625db62c7)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/hashseti.h>
3 
/* Unique PETSc class id for PetscPartitioner objects (assigned during package registration) */
PetscClassId PETSCPARTITIONER_CLASSID = 0;

/* Registry of available PetscPartitioner implementations, populated via PetscPartitionerRegister() */
PetscFunctionList PetscPartitionerList              = NULL;
/* Guard so PetscPartitionerRegisterAll() registers the built-in implementations only once */
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
8 
9 PetscBool ChacoPartitionercite = PETSC_FALSE;
10 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
11                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
12                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
13                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
14                                "  isbn      = {0-89791-816-9},\n"
15                                "  pages     = {28},\n"
16                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
17                                "  publisher = {ACM Press},\n"
18                                "  address   = {New York},\n"
19                                "  year      = {1995}\n}\n";
20 
/* Citation flag/text for the ParMetis partitioner, for use with PetscCitationsRegister() */
PetscBool ParMetisPartitionercite = PETSC_FALSE;
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";
29 
30 /*@C
31   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
32 
33   Input Parameters:
34 + dm      - The mesh DM dm
35 - height  - Height of the strata from which to construct the graph
36 
37   Output Parameter:
38 + numVertices     - Number of vertices in the graph
39 . offsets         - Point offsets in the graph
40 . adjacency       - Point connectivity in the graph
41 - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.
42 
43   The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
44   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
45   representation on the mesh.
46 
47   Level: developer
48 
49 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
50 @*/
51 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
52 {
53   PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
54   PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
55   IS             cellNumbering;
56   const PetscInt *cellNum;
57   PetscBool      useCone, useClosure;
58   PetscSection   section;
59   PetscSegBuffer adjBuffer;
60   PetscSF        sfPoint;
61   PetscMPIInt    rank;
62   PetscErrorCode ierr;
63 
64   PetscFunctionBegin;
65   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
66   ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
67   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
68   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
69   /* Build adjacency graph via a section/segbuffer */
70   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
71   ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
72   ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
73   /* Always use FVM adjacency to create partitioner graph */
74   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
75   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
76   ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
77   ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
78   ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);CHKERRQ(ierr);
79   if (globalNumbering) {
80     ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
81     *globalNumbering = cellNumbering;
82   }
83   ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
84   for (*numVertices = 0, p = pStart; p < pEnd; p++) {
85     /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
86     if (nroots > 0) {if (cellNum[p] < 0) continue;}
87     adjSize = PETSC_DETERMINE;
88     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
89     for (a = 0; a < adjSize; ++a) {
90       const PetscInt point = adj[a];
91       if (point != p && pStart <= point && point < pEnd) {
92         PetscInt *PETSC_RESTRICT pBuf;
93         ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
94         ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
95         *pBuf = point;
96       }
97     }
98     (*numVertices)++;
99   }
100   ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
101   ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
102   /* Derive CSR graph from section/segbuffer */
103   ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
104   ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
105   ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
106   for (idx = 0, p = pStart; p < pEnd; p++) {
107     if (nroots > 0) {if (cellNum[p] < 0) continue;}
108     ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
109   }
110   vOffsets[*numVertices] = size;
111   if (offsets) *offsets = vOffsets;
112   ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
113   {
114     ISLocalToGlobalMapping ltogCells;
115     PetscInt n, size, *cells_arr;
116     /* In parallel, apply a global cell numbering to the graph */
117     ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
118     ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
119     ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
120     ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
121     /* Convert to positive global cell numbers */
122     for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
123     ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
124     ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);CHKERRQ(ierr);
125     ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
126     ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
127   }
128   if (adjacency) *adjacency = graph;
129   /* Clean up */
130   ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
131   ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
132   ierr = PetscFree(adj);CHKERRQ(ierr);
133   PetscFunctionReturn(0);
134 }
135 
136 /*@C
137   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
138 
139   Collective
140 
141   Input Arguments:
142 + dm - The DMPlex
143 - cellHeight - The height of mesh points to treat as cells (default should be 0)
144 
145   Output Arguments:
146 + numVertices - The number of local vertices in the graph, or cells in the mesh.
147 . offsets     - The offset to the adjacency list for each cell
148 - adjacency   - The adjacency list for all cells
149 
150   Note: This is suitable for input to a mesh partitioner like ParMetis.
151 
152   Level: advanced
153 
154 .seealso: DMPlexCreate()
155 @*/
156 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
157 {
158   const PetscInt maxFaceCases = 30;
159   PetscInt       numFaceCases = 0;
160   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
161   PetscInt      *off, *adj;
162   PetscInt      *neighborCells = NULL;
163   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
164   PetscErrorCode ierr;
165 
166   PetscFunctionBegin;
167   /* For parallel partitioning, I think you have to communicate supports */
168   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
169   cellDim = dim - cellHeight;
170   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
171   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
172   if (cEnd - cStart == 0) {
173     if (numVertices) *numVertices = 0;
174     if (offsets)   *offsets   = NULL;
175     if (adjacency) *adjacency = NULL;
176     PetscFunctionReturn(0);
177   }
178   numCells  = cEnd - cStart;
179   faceDepth = depth - cellHeight;
180   if (dim == depth) {
181     PetscInt f, fStart, fEnd;
182 
183     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
184     /* Count neighboring cells */
185     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
186     for (f = fStart; f < fEnd; ++f) {
187       const PetscInt *support;
188       PetscInt        supportSize;
189       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
190       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
191       if (supportSize == 2) {
192         PetscInt numChildren;
193 
194         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
195         if (!numChildren) {
196           ++off[support[0]-cStart+1];
197           ++off[support[1]-cStart+1];
198         }
199       }
200     }
201     /* Prefix sum */
202     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
203     if (adjacency) {
204       PetscInt *tmp;
205 
206       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
207       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
208       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
209       /* Get neighboring cells */
210       for (f = fStart; f < fEnd; ++f) {
211         const PetscInt *support;
212         PetscInt        supportSize;
213         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
214         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
215         if (supportSize == 2) {
216           PetscInt numChildren;
217 
218           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
219           if (!numChildren) {
220             adj[tmp[support[0]-cStart]++] = support[1];
221             adj[tmp[support[1]-cStart]++] = support[0];
222           }
223         }
224       }
225 #if defined(PETSC_USE_DEBUG)
226       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
227 #endif
228       ierr = PetscFree(tmp);CHKERRQ(ierr);
229     }
230     if (numVertices) *numVertices = numCells;
231     if (offsets)   *offsets   = off;
232     if (adjacency) *adjacency = adj;
233     PetscFunctionReturn(0);
234   }
235   /* Setup face recognition */
236   if (faceDepth == 1) {
237     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
238 
239     for (c = cStart; c < cEnd; ++c) {
240       PetscInt corners;
241 
242       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
243       if (!cornersSeen[corners]) {
244         PetscInt nFV;
245 
246         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
247         cornersSeen[corners] = 1;
248 
249         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
250 
251         numFaceVertices[numFaceCases++] = nFV;
252       }
253     }
254   }
255   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
256   /* Count neighboring cells */
257   for (cell = cStart; cell < cEnd; ++cell) {
258     PetscInt numNeighbors = PETSC_DETERMINE, n;
259 
260     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
261     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
262     for (n = 0; n < numNeighbors; ++n) {
263       PetscInt        cellPair[2];
264       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
265       PetscInt        meetSize = 0;
266       const PetscInt *meet    = NULL;
267 
268       cellPair[0] = cell; cellPair[1] = neighborCells[n];
269       if (cellPair[0] == cellPair[1]) continue;
270       if (!found) {
271         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
272         if (meetSize) {
273           PetscInt f;
274 
275           for (f = 0; f < numFaceCases; ++f) {
276             if (numFaceVertices[f] == meetSize) {
277               found = PETSC_TRUE;
278               break;
279             }
280           }
281         }
282         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
283       }
284       if (found) ++off[cell-cStart+1];
285     }
286   }
287   /* Prefix sum */
288   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
289 
290   if (adjacency) {
291     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
292     /* Get neighboring cells */
293     for (cell = cStart; cell < cEnd; ++cell) {
294       PetscInt numNeighbors = PETSC_DETERMINE, n;
295       PetscInt cellOffset   = 0;
296 
297       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
298       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
299       for (n = 0; n < numNeighbors; ++n) {
300         PetscInt        cellPair[2];
301         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
302         PetscInt        meetSize = 0;
303         const PetscInt *meet    = NULL;
304 
305         cellPair[0] = cell; cellPair[1] = neighborCells[n];
306         if (cellPair[0] == cellPair[1]) continue;
307         if (!found) {
308           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
309           if (meetSize) {
310             PetscInt f;
311 
312             for (f = 0; f < numFaceCases; ++f) {
313               if (numFaceVertices[f] == meetSize) {
314                 found = PETSC_TRUE;
315                 break;
316               }
317             }
318           }
319           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
320         }
321         if (found) {
322           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
323           ++cellOffset;
324         }
325       }
326     }
327   }
328   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
329   if (numVertices) *numVertices = numCells;
330   if (offsets)   *offsets   = off;
331   if (adjacency) *adjacency = adj;
332   PetscFunctionReturn(0);
333 }
334 
335 /*@C
336   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
337 
338   Not Collective
339 
340   Input Parameters:
341 + name        - The name of a new user-defined creation routine
342 - create_func - The creation routine itself
343 
344   Notes:
345   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
346 
347   Sample usage:
348 .vb
349     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
350 .ve
351 
352   Then, your PetscPartitioner type can be chosen with the procedural interface via
353 .vb
354     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
355     PetscPartitionerSetType(PetscPartitioner, "my_part");
356 .ve
357    or at runtime via the option
358 .vb
359     -petscpartitioner_type my_part
360 .ve
361 
362   Level: advanced
363 
364 .keywords: PetscPartitioner, register
365 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
366 
367 @*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Add the creation routine to the global registry under the given type name */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
376 
377 /*@C
378   PetscPartitionerSetType - Builds a particular PetscPartitioner
379 
380   Collective on PetscPartitioner
381 
382   Input Parameters:
383 + part - The PetscPartitioner object
384 - name - The kind of partitioner
385 
386   Options Database Key:
387 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
388 
389   Level: intermediate
390 
391 .keywords: PetscPartitioner, set, type
392 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
393 @*/
394 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
395 {
396   PetscErrorCode (*r)(PetscPartitioner);
397   PetscBool      match;
398   PetscErrorCode ierr;
399 
400   PetscFunctionBegin;
401   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
402   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
403   if (match) PetscFunctionReturn(0);
404 
405   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
406   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
407   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
408 
409   if (part->ops->destroy) {
410     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
411   }
412   ierr = PetscMemzero(part->ops, sizeof(struct _PetscPartitionerOps));CHKERRQ(ierr);
413   ierr = (*r)(part);CHKERRQ(ierr);
414   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
415   PetscFunctionReturn(0);
416 }
417 
418 /*@C
419   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
420 
421   Not Collective
422 
423   Input Parameter:
424 . part - The PetscPartitioner
425 
426   Output Parameter:
427 . name - The PetscPartitioner type name
428 
429   Level: intermediate
430 
431 .keywords: PetscPartitioner, get, type, name
432 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
433 @*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  /* Return the type name stored on the base object; NULL if no type has been set yet */
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
445 
446 /*@C
447   PetscPartitionerView - Views a PetscPartitioner
448 
449   Collective on PetscPartitioner
450 
  Input Parameters:
452 + part - the PetscPartitioner object to view
453 - v    - the viewer
454 
455   Level: developer
456 
457 .seealso: PetscPartitionerDestroy()
458 @*/
459 PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
460 {
461   PetscErrorCode ierr;
462 
463   PetscFunctionBegin;
464   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
465   if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
466   if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
467   PetscFunctionReturn(0);
468 }
469 
/* Choose the partitioner type to use when none is set: prefer an available external
   package (Chaco, then ParMetis, then PTScotch), falling back to the built-in simple
   partitioner.  If currentType is non-NULL it is returned unchanged. */
static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType)
{
  PetscFunctionBegin;
  if (!currentType) {
#if defined(PETSC_HAVE_CHACO)
    *defaultType = PETSCPARTITIONERCHACO;
#elif defined(PETSC_HAVE_PARMETIS)
    *defaultType = PETSCPARTITIONERPARMETIS;
#elif defined(PETSC_HAVE_PTSCOTCH)
    *defaultType = PETSCPARTITIONERPTSCOTCH;
#else
    *defaultType = PETSCPARTITIONERSIMPLE;
#endif
  } else {
    *defaultType = currentType;
  }
  PetscFunctionReturn(0);
}
488 
489 /*@
490   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
491 
492   Collective on PetscPartitioner
493 
494   Input Parameter:
495 . part - the PetscPartitioner object to set options for
496 
497   Level: developer
498 
499 .seealso: PetscPartitionerView()
500 @*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  const char    *defaultType;
  char           name[256];
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  /* Use the current type (if any) as the default presented for -petscpartitioner_type */
  ierr = PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);CHKERRQ(ierr);
  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, sizeof(name), &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
  } else if (!((PetscObject) part)->type_name) {
    /* No type given on the command line and none set yet: install the default */
    ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
  }
  /* Give the implementation a chance to process its own options */
  if (part->ops->setfromoptions) {
    ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);
  }
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
528 
529 /*@C
530   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
531 
532   Collective on PetscPartitioner
533 
534   Input Parameter:
535 . part - the PetscPartitioner object to setup
536 
537   Level: developer
538 
539 .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
540 @*/
PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Delegate to the type-specific setup routine, if the implementation provides one */
  if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
550 
551 /*@
552   PetscPartitionerDestroy - Destroys a PetscPartitioner object
553 
554   Collective on PetscPartitioner
555 
556   Input Parameter:
557 . part - the PetscPartitioner object to destroy
558 
559   Level: developer
560 
561 .seealso: PetscPartitionerView()
562 @*/
563 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
564 {
565   PetscErrorCode ierr;
566 
567   PetscFunctionBegin;
568   if (!*part) PetscFunctionReturn(0);
569   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
570 
571   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
572   ((PetscObject) (*part))->refct = 0;
573 
574   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
575   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
576   PetscFunctionReturn(0);
577 }
578 
579 /*@
580   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
581 
582   Collective on MPI_Comm
583 
584   Input Parameter:
585 . comm - The communicator for the PetscPartitioner object
586 
587   Output Parameter:
588 . part - The PetscPartitioner object
589 
590   Level: beginner
591 
592 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
593 @*/
PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
{
  PetscPartitioner p;
  const char       *partitionerType = NULL;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidPointer(part, 2);
  *part = NULL;
  /* Ensure the PetscPartitioner class id and built-in types are registered */
  ierr = DMInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
  /* Install the compile-time default type so the object is immediately usable */
  ierr = PetscPartitionerGetDefaultType(NULL,&partitionerType);CHKERRQ(ierr);
  ierr = PetscPartitionerSetType(p,partitionerType);CHKERRQ(ierr);

  *part = p;
  PetscFunctionReturn(0);
}
612 
613 /*@
614   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
615 
616   Collective on DM
617 
618   Input Parameters:
619 + part    - The PetscPartitioner
620 - dm      - The mesh DM
621 
622   Output Parameters:
623 + partSection     - The PetscSection giving the division of points by partition
624 - partition       - The list of points by partition
625 
626   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
627 
628   Level: developer
629 
.seealso: DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
631 @*/
632 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
633 {
634   PetscMPIInt    size;
635   PetscErrorCode ierr;
636 
637   PetscFunctionBegin;
638   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
639   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
640   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
641   PetscValidPointer(partition, 5);
642   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
643   if (size == 1) {
644     PetscInt *points;
645     PetscInt  cStart, cEnd, c;
646 
647     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
648     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
649     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
650     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
651     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
652     for (c = cStart; c < cEnd; ++c) points[c] = c;
653     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
654     PetscFunctionReturn(0);
655   }
656   if (part->height == 0) {
657     PetscInt numVertices;
658     PetscInt *start     = NULL;
659     PetscInt *adjacency = NULL;
660     IS       globalNumbering;
661 
662     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
663     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
664     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
665     ierr = PetscFree(start);CHKERRQ(ierr);
666     ierr = PetscFree(adjacency);CHKERRQ(ierr);
667     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
668       const PetscInt *globalNum;
669       const PetscInt *partIdx;
670       PetscInt *map, cStart, cEnd;
671       PetscInt *adjusted, i, localSize, offset;
672       IS    newPartition;
673 
674       ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
675       ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
676       ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
677       ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
678       ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
679       ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
680       for (i = cStart, offset = 0; i < cEnd; i++) {
681         if (globalNum[i - cStart] >= 0) map[offset++] = i;
682       }
683       for (i = 0; i < localSize; i++) {
684         adjusted[i] = map[partIdx[i]];
685       }
686       ierr = PetscFree(map);CHKERRQ(ierr);
687       ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
688       ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
689       ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
690       ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
691       ierr = ISDestroy(partition);CHKERRQ(ierr);
692       *partition = newPartition;
693     }
694   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
695   PetscFunctionReturn(0);
696 
697 }
698 
static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  /* Release the stored partition data before freeing the implementation struct */
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
710 
711 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
712 {
713   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
714   PetscViewerFormat       format;
715   PetscErrorCode          ierr;
716 
717   PetscFunctionBegin;
718   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
719   ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
720   if (p->random) {
721     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
722     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
723     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
724   }
725   PetscFunctionReturn(0);
726 }
727 
728 static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
729 {
730   PetscBool      iascii;
731   PetscErrorCode ierr;
732 
733   PetscFunctionBegin;
734   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
735   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
736   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
737   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
738   PetscFunctionReturn(0);
739 }
740 
static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner Shell Options");CHKERRQ(ierr);
  /* When set, the shell partitioner generates a random partition instead of requiring one from the user */
  ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
752 
753 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
754 {
755   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
756   PetscInt                np;
757   PetscErrorCode          ierr;
758 
759   PetscFunctionBegin;
760   if (p->random) {
761     PetscRandom r;
762     PetscInt   *sizes, *points, v, p;
763     PetscMPIInt rank;
764 
765     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
766     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
767     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr);
768     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
769     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
770     for (v = 0; v < numVertices; ++v) {points[v] = v;}
771     for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);}
772     for (v = numVertices-1; v > 0; --v) {
773       PetscReal val;
774       PetscInt  w, tmp;
775 
776       ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));CHKERRQ(ierr);
777       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
778       w    = PetscFloorReal(val);
779       tmp       = points[v];
780       points[v] = points[w];
781       points[w] = tmp;
782     }
783     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
784     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
785     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
786   }
787   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
788   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
789   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
790   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
791   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
792   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
793   *partition = p->partition;
794   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
795   PetscFunctionReturn(0);
796 }
797 
798 static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
799 {
800   PetscFunctionBegin;
801   part->ops->view           = PetscPartitionerView_Shell;
802   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
803   part->ops->destroy        = PetscPartitionerDestroy_Shell;
804   part->ops->partition      = PetscPartitionerPartition_Shell;
805   PetscFunctionReturn(0);
806 }
807 
808 /*MC
809   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
810 
811   Level: intermediate
812 
813 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
814 M*/
815 
816 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
817 {
818   PetscPartitioner_Shell *p;
819   PetscErrorCode          ierr;
820 
821   PetscFunctionBegin;
822   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
823   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
824   part->data = p;
825 
826   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
827   p->random = PETSC_FALSE;
828   PetscFunctionReturn(0);
829 }
830 
831 /*@C
832   PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
833 
834   Collective on Part
835 
836   Input Parameters:
837 + part     - The PetscPartitioner
838 . size - The number of partitions
839 . sizes    - array of size size (or NULL) providing the number of points in each partition
840 - points   - array of size sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)
841 
842   Level: developer
843 
844   Notes:
845 
846     It is safe to free the sizes and points arrays after use in this routine.
847 
848 .seealso DMPlexDistribute(), PetscPartitionerCreate()
849 @*/
850 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
851 {
852   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
853   PetscInt                proc, numPoints;
854   PetscErrorCode          ierr;
855 
856   PetscFunctionBegin;
857   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
858   if (sizes)  {PetscValidPointer(sizes, 3);}
859   if (points) {PetscValidPointer(points, 4);}
860   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
861   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
862   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
863   ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
864   if (sizes) {
865     for (proc = 0; proc < size; ++proc) {
866       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
867     }
868   }
869   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
870   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
871   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
872   PetscFunctionReturn(0);
873 }
874 
875 /*@
876   PetscPartitionerShellSetRandom - Set the flag to use a random partition
877 
878   Collective on Part
879 
880   Input Parameters:
881 + part   - The PetscPartitioner
882 - random - The flag to use a random partition
883 
884   Level: intermediate
885 
886 .seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
887 @*/
888 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
889 {
890   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
891 
892   PetscFunctionBegin;
893   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
894   p->random = random;
895   PetscFunctionReturn(0);
896 }
897 
898 /*@
899   PetscPartitionerShellGetRandom - get the flag to use a random partition
900 
901   Collective on Part
902 
903   Input Parameter:
904 . part   - The PetscPartitioner
905 
906   Output Parameter:
907 . random - The flag to use a random partition
908 
909   Level: intermediate
910 
911 .seealso PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
912 @*/
913 PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
914 {
915   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
916 
917   PetscFunctionBegin;
918   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
919   PetscValidPointer(random, 2);
920   *random = p->random;
921   PetscFunctionReturn(0);
922 }
923 
924 static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
925 {
926   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
927   PetscErrorCode          ierr;
928 
929   PetscFunctionBegin;
930   ierr = PetscFree(p);CHKERRQ(ierr);
931   PetscFunctionReturn(0);
932 }
933 
/* Print a one-line description of the Simple partitioner to an ASCII viewer */
static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscViewerFormat format;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  /* format is fetched but currently unused; kept to mirror the sibling viewer routines */
  ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
944 
945 static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
946 {
947   PetscBool      iascii;
948   PetscErrorCode ierr;
949 
950   PetscFunctionBegin;
951   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
952   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
953   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
954   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
955   PetscFunctionReturn(0);
956 }
957 
/* Contiguous-chunk partitioner: vertices keep their global order and are split
   into nparts nearly equal consecutive chunks; no graph connectivity is used.
   The returned IS is always the identity permutation of the local vertices;
   partSection records how many local points belong to each global part. */
static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)dm);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  /* Points are assigned in their original order, so the IS is a stride */
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial: part np gets numVertices/nparts points; the first numVertices%nparts parts get one extra */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  }
  else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Compute the global index range [myFirst, myLast] of the local vertices
       via a prefix sum of the per-rank vertex counts */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      PetscInt rem = nvGlobal % nparts;    /* number of "large" parts holding one extra vertex */
      PetscInt pSmall = nvGlobal/nparts;   /* size of a small part */
      PetscInt pBig = nvGlobal/nparts + 1; /* size of a large part */


      /* Locate the global part indices containing myFirst and myLast.
         By convention the first `rem` parts are large (pBig points each),
         the remaining parts are small (pSmall points each). */
      if (rem) {
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        }
        else {
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        }
        else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      }
      else {
        /* All parts have exactly nvGlobal/nparts points */
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      /* For each part overlapping the local range, record how many local points fall in it */
      for (np = firstPart; np <= lastPart; np++) {
        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        /* Clip the part's global range to the locally owned range */
        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1031 
1032 static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1033 {
1034   PetscFunctionBegin;
1035   part->ops->view      = PetscPartitionerView_Simple;
1036   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1037   part->ops->partition = PetscPartitionerPartition_Simple;
1038   PetscFunctionReturn(0);
1039 }
1040 
1041 /*MC
1042   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1043 
1044   Level: intermediate
1045 
1046 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1047 M*/
1048 
1049 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1050 {
1051   PetscPartitioner_Simple *p;
1052   PetscErrorCode           ierr;
1053 
1054   PetscFunctionBegin;
1055   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1056   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1057   part->data = p;
1058 
1059   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1060   PetscFunctionReturn(0);
1061 }
1062 
1063 static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1064 {
1065   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1066   PetscErrorCode          ierr;
1067 
1068   PetscFunctionBegin;
1069   ierr = PetscFree(p);CHKERRQ(ierr);
1070   PetscFunctionReturn(0);
1071 }
1072 
/* Print a one-line description of the Gather partitioner to an ASCII viewer */
static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscViewerFormat format;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  /* format is fetched but currently unused; kept to mirror the sibling viewer routines */
  ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "Gather Graph Partitioner:\n");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1083 
1084 static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1085 {
1086   PetscBool      iascii;
1087   PetscErrorCode ierr;
1088 
1089   PetscFunctionBegin;
1090   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1091   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1092   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1093   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1094   PetscFunctionReturn(0);
1095 }
1096 
1097 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1098 {
1099   PetscInt       np;
1100   PetscErrorCode ierr;
1101 
1102   PetscFunctionBegin;
1103   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1104   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1105   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1106   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1107   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1108   PetscFunctionReturn(0);
1109 }
1110 
1111 static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1112 {
1113   PetscFunctionBegin;
1114   part->ops->view      = PetscPartitionerView_Gather;
1115   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1116   part->ops->partition = PetscPartitionerPartition_Gather;
1117   PetscFunctionReturn(0);
1118 }
1119 
1120 /*MC
1121   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1122 
1123   Level: intermediate
1124 
1125 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1126 M*/
1127 
1128 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1129 {
1130   PetscPartitioner_Gather *p;
1131   PetscErrorCode           ierr;
1132 
1133   PetscFunctionBegin;
1134   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1135   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1136   part->data = p;
1137 
1138   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1139   PetscFunctionReturn(0);
1140 }
1141 
1142 
1143 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1144 {
1145   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1146   PetscErrorCode          ierr;
1147 
1148   PetscFunctionBegin;
1149   ierr = PetscFree(p);CHKERRQ(ierr);
1150   PetscFunctionReturn(0);
1151 }
1152 
/* Print a one-line description of the Chaco partitioner to an ASCII viewer */
static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscViewerFormat format;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  /* format is fetched but currently unused; kept to mirror the sibling viewer routines */
  ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1163 
1164 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1165 {
1166   PetscBool      iascii;
1167   PetscErrorCode ierr;
1168 
1169   PetscFunctionBegin;
1170   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1171   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1172   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1173   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1174   PetscFunctionReturn(0);
1175 }
1176 
1177 #if defined(PETSC_HAVE_CHACO)
1178 #if defined(PETSC_HAVE_UNISTD_H)
1179 #include <unistd.h>
1180 #endif
1181 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1182 #include <chaco.h>
1183 #else
1184 /* Older versions of Chaco do not have an include file */
1185 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1186                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1187                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1188                        int mesh_dims[3], double *goal, int global_method, int local_method,
1189                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1190 #endif
1191 extern int FREE_GRAPH;
1192 #endif
1193 
/* Partition the (serial) dual graph with the Chaco library.

   Chaco only handles serial graphs, so in debug mode we verify the graph lives
   on a single process.  Chaco expects 1-based adjacency, so the shared
   adjacency array is incremented in place before the call and decremented
   afterwards.  Chaco prints diagnostics to stdout; stdout is temporarily
   redirected into a pipe so the output can be attached to an error message. */
static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int           *assignment;              /* Output partition */
#else
  short int     *assignment;              /* Output partition */
#endif
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
#if defined (PETSC_USE_DEBUG)
  {
    int ival,isum;
    PetscBool distributed;

    /* The graph counts as distributed if more than one rank owns vertices */
    ival = (numVertices > 0);
    ierr = MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);CHKERRQ(ierr);
    distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
    if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
  }
#endif
  if (!numVertices) {
    /* Nothing local to partition: return an empty section and IS */
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco uses 1-based vertex numbering; shift in place and undo below */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    /* Drain Chaco's output and restore the original stdout.
       NOTE(review): read() appears to assume Chaco always writes something and
       that the output fits in one read; confirm for quiet/verbose Chaco builds. */
    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#else
  if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* Group the vertex numbers by partition: partition 0's points first, then 1's, ... */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Restore the caller's 0-based adjacency */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1314 
1315 static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1316 {
1317   PetscFunctionBegin;
1318   part->ops->view      = PetscPartitionerView_Chaco;
1319   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1320   part->ops->partition = PetscPartitionerPartition_Chaco;
1321   PetscFunctionReturn(0);
1322 }
1323 
1324 /*MC
1325   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1326 
1327   Level: intermediate
1328 
1329 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1330 M*/
1331 
1332 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1333 {
1334   PetscPartitioner_Chaco *p;
1335   PetscErrorCode          ierr;
1336 
1337   PetscFunctionBegin;
1338   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1339   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1340   part->data = p;
1341 
1342   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1343   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1344   PetscFunctionReturn(0);
1345 }
1346 
1347 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1348 {
1349   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1350   PetscErrorCode             ierr;
1351 
1352   PetscFunctionBegin;
1353   ierr = PetscFree(p);CHKERRQ(ierr);
1354   PetscFunctionReturn(0);
1355 }
1356 
/* Print a one-line description of the ParMetis partitioner to an ASCII viewer */
static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscViewerFormat format;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  /* format is fetched but currently unused; kept to mirror the sibling viewer routines */
  ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1367 
1368 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1369 {
1370   PetscBool      iascii;
1371   PetscErrorCode ierr;
1372 
1373   PetscFunctionBegin;
1374   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1375   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1376   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1377   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1378   PetscFunctionReturn(0);
1379 }
1380 
/* Process options for the ParMetis partitioner (currently only the method choice) */
static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  /* Index into ptypes matches p->ptype: 0 = k-way, 1 = recursive bisection
     (see the metis_ptype dispatch in PetscPartitionerPartition_ParMetis) */
  static const char         *ptypes[] = {"kway", "rb"};
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
  PetscErrorCode            ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");CHKERRQ(ierr);
  /* The current value ptypes[p->ptype] is the default, so the setting is sticky */
  ierr = PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1393 
1394 #if defined(PETSC_HAVE_PARMETIS)
1395 #include <parmetis.h>
1396 #endif
1397 
1398 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1399 {
1400 #if defined(PETSC_HAVE_PARMETIS)
1401   MPI_Comm       comm;
1402   PetscSection   section;
1403   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1404   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1405   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1406   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1407   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1408   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1409   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1410   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1411   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1412   real_t        *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1413   real_t        *ubvec;                    /* The balance intolerance for vertex weights */
1414   PetscInt       options[64];              /* Options */
1415   /* Outputs */
1416   PetscInt       edgeCut;                  /* The number of edges cut by the partition */
1417   PetscInt       v, i, *assignment, *points;
1418   PetscMPIInt    size, rank, p;
1419   PetscInt       metis_ptype = ((PetscPartitioner_ParMetis *) part->data)->ptype;
1420   PetscErrorCode ierr;
1421 
1422   PetscFunctionBegin;
1423   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1424   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1425   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1426   /* Calculate vertex distribution */
1427   ierr = PetscMalloc5(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
1428   vtxdist[0] = 0;
1429   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1430   for (p = 2; p <= size; ++p) {
1431     vtxdist[p] += vtxdist[p-1];
1432   }
1433   /* Calculate partition weights */
1434   for (p = 0; p < nparts; ++p) {
1435     tpwgts[p] = 1.0/nparts;
1436   }
1437   ubvec[0] = 1.05;
1438   /* Weight cells by dofs on cell by default */
1439   ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
1440   if (section) {
1441     PetscInt cStart, cEnd, dof;
1442 
1443     ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
1444     for (v = cStart; v < cEnd; ++v) {
1445       ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1446       /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1447       /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1448       if (v-cStart < numVertices) vwgt[v-cStart] = PetscMax(dof, 1);
1449     }
1450   } else {
1451     for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1452   }
1453   wgtflag |= 2; /* have weights on graph vertices */
1454 
1455   if (nparts == 1) {
1456     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1457   } else {
1458     for (p = 0; !vtxdist[p+1] && p < size; ++p);
1459     if (vtxdist[p+1] == vtxdist[size]) {
1460       if (rank == p) {
1461         ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
1462         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
1463         if (metis_ptype == 1) {
1464           PetscStackPush("METIS_PartGraphRecursive");
1465           ierr = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &edgeCut, assignment);
1466           PetscStackPop;
1467           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
1468         } else {
1469           /*
1470            It would be nice to activate the two options below, but they would need some actual testing.
1471            - Turning on these options may exercise path of the METIS code that have bugs and may break production runs.
1472            - If CONTIG is set to 1, METIS will exit with error if the graph is disconnected, despite the manual saying the option is ignored in such case.
1473           */
1474           /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
1475           /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
1476           PetscStackPush("METIS_PartGraphKway");
1477           ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &edgeCut, assignment);
1478           PetscStackPop;
1479           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1480         }
1481       }
1482     } else {
1483       options[0] = 0; /* use all defaults */
1484       PetscStackPush("ParMETIS_V3_PartKway");
1485       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1486       PetscStackPop;
1487       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
1488     }
1489   }
1490   /* Convert to PetscSection+IS */
1491   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1492   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1493   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1494   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1495   for (p = 0, i = 0; p < nparts; ++p) {
1496     for (v = 0; v < nvtxs; ++v) {
1497       if (assignment[v] == p) points[i++] = v;
1498     }
1499   }
1500   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1501   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1502   ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
1503   PetscFunctionReturn(0);
1504 #else
1505   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1506 #endif
1507 }
1508 
1509 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1510 {
1511   PetscFunctionBegin;
1512   part->ops->view           = PetscPartitionerView_ParMetis;
1513   part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
1514   part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
1515   part->ops->partition      = PetscPartitionerPartition_ParMetis;
1516   PetscFunctionReturn(0);
1517 }
1518 
1519 /*MC
1520   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1521 
1522   Level: intermediate
1523 
1524 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1525 M*/
1526 
1527 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1528 {
1529   PetscPartitioner_ParMetis *p;
1530   PetscErrorCode          ierr;
1531 
1532   PetscFunctionBegin;
1533   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1534   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1535   part->data = p;
1536 
1537   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1538   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1539   PetscFunctionReturn(0);
1540 }
1541 
1542 
/* Citation metadata for the PT-Scotch library; registered on first creation of a "ptscotch" partitioner */
PetscBool PTScotchPartitionercite = PETSC_FALSE;
const char PTScotchPartitionerCitation[] =
  "@article{PTSCOTCH,\n"
  "  author  = {C. Chevalier and F. Pellegrini},\n"
  "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
  "  journal = {Parallel Computing},\n"
  "  volume  = {34},\n"
  "  number  = {6},\n"
  "  pages   = {318--331},\n"
  "  year    = {2008},\n"
  "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
  "}\n";
1555 
/* Implementation-specific context for the PT-Scotch partitioner */
typedef struct {
  PetscInt  strategy;  /* index into PTScotchStrategyList; mapped to a SCOTCH_STRAT* flag by PTScotch_Strategy() */
  PetscReal imbalance; /* allowed load imbalance ratio passed to the SCOTCH strategy builders */
} PetscPartitioner_PTScotch;
1560 
/* Human-readable strategy names for options processing and viewing;
   the order must match the index mapping in PTScotch_Strategy() */
static const char *const
PTScotchStrategyList[] = {
  "DEFAULT",
  "QUALITY",
  "SPEED",
  "BALANCE",
  "SAFETY",
  "SCALABILITY",
  "RECURSIVE",
  "REMAP"
};
1572 
1573 #if defined(PETSC_HAVE_PTSCOTCH)
1574 
1575 EXTERN_C_BEGIN
1576 #include <ptscotch.h>
1577 EXTERN_C_END
1578 
/* PT-Scotch routines return a nonzero int on failure; convert that into a PETSc library error */
#define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0)
1580 
1581 static int PTScotch_Strategy(PetscInt strategy)
1582 {
1583   switch (strategy) {
1584   case  0: return SCOTCH_STRATDEFAULT;
1585   case  1: return SCOTCH_STRATQUALITY;
1586   case  2: return SCOTCH_STRATSPEED;
1587   case  3: return SCOTCH_STRATBALANCE;
1588   case  4: return SCOTCH_STRATSAFETY;
1589   case  5: return SCOTCH_STRATSCALABILITY;
1590   case  6: return SCOTCH_STRATRECURSIVE;
1591   case  7: return SCOTCH_STRATREMAP;
1592   default: return SCOTCH_STRATDEFAULT;
1593   }
1594 }
1595 
1596 
/* Partition a sequential CSR graph of n vertices into nparts pieces with SCOTCH.
   strategy/imbalance select and tune the SCOTCH mapping strategy; vtxwgt/adjwgt
   are optional vertex/edge weights; the part[] array receives one target part
   number per vertex. Errors from SCOTCH are converted via CHKERRPTSCOTCH. */
static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
                                             SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[])
{
  SCOTCH_Graph   grafdat;
  SCOTCH_Strat   stradat;
  SCOTCH_Num     vertnbr = n;
  SCOTCH_Num     edgenbr = xadj[n]; /* total number of (directed) edges in the CSR structure */
  SCOTCH_Num*    velotab = vtxwgt;
  SCOTCH_Num*    edlotab = adjwgt;
  SCOTCH_Num     flagval = strategy;
  double         kbalval = imbalance;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  {
    /* Hidden escape hatch to drop vertex weights.
       NOTE(review): the option name is misspelled ("petscpartititoner"); a correctly
       spelled "-petscpartitioner_..." option will not match this string — confirm
       before fixing, since the string is a user-visible option name. */
    PetscBool flg = PETSC_TRUE;
    ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
    if (!flg) velotab = NULL;
  }
  ierr = SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
  ierr = SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
#if defined(PETSC_USE_DEBUG)
  /* Validate graph consistency only in debug builds; the check is expensive */
  ierr = SCOTCH_graphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
#endif
  ierr = SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
  SCOTCH_stratExit(&stradat);
  SCOTCH_graphExit(&grafdat);
  PetscFunctionReturn(0);
}
1628 
1629 static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1630                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[], MPI_Comm comm)
1631 {
1632   PetscMPIInt     procglbnbr;
1633   PetscMPIInt     proclocnum;
1634   SCOTCH_Arch     archdat;
1635   SCOTCH_Dgraph   grafdat;
1636   SCOTCH_Dmapping mappdat;
1637   SCOTCH_Strat    stradat;
1638   SCOTCH_Num      vertlocnbr;
1639   SCOTCH_Num      edgelocnbr;
1640   SCOTCH_Num*     veloloctab = vtxwgt;
1641   SCOTCH_Num*     edloloctab = adjwgt;
1642   SCOTCH_Num      flagval = strategy;
1643   double          kbalval = imbalance;
1644   PetscErrorCode  ierr;
1645 
1646   PetscFunctionBegin;
1647   {
1648     PetscBool flg = PETSC_TRUE;
1649     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
1650     if (!flg) veloloctab = NULL;
1651   }
1652   ierr = MPI_Comm_size(comm, &procglbnbr);CHKERRQ(ierr);
1653   ierr = MPI_Comm_rank(comm, &proclocnum);CHKERRQ(ierr);
1654   vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
1655   edgelocnbr = xadj[vertlocnbr];
1656 
1657   ierr = SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr);
1658   ierr = SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr);
1659 #if defined(PETSC_USE_DEBUG)
1660   ierr = SCOTCH_dgraphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1661 #endif
1662   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1663   ierr = SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRQ(ierr);
1664   ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
1665   ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
1666   ierr = SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr);
1667   ierr = SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr);
1668   SCOTCH_dgraphMapExit(&grafdat, &mappdat);
1669   SCOTCH_archExit(&archdat);
1670   SCOTCH_stratExit(&stradat);
1671   SCOTCH_dgraphExit(&grafdat);
1672   PetscFunctionReturn(0);
1673 }
1674 
1675 #endif /* PETSC_HAVE_PTSCOTCH */
1676 
1677 static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
1678 {
1679   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1680   PetscErrorCode             ierr;
1681 
1682   PetscFunctionBegin;
1683   ierr = PetscFree(p);CHKERRQ(ierr);
1684   PetscFunctionReturn(0);
1685 }
1686 
1687 static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer)
1688 {
1689   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1690   PetscErrorCode            ierr;
1691 
1692   PetscFunctionBegin;
1693   ierr = PetscViewerASCIIPrintf(viewer, "PTScotch Graph Partitioner:\n");CHKERRQ(ierr);
1694   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1695   ierr = PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);CHKERRQ(ierr);
1696   ierr = PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n",(double)p->imbalance);CHKERRQ(ierr);
1697   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1698   PetscFunctionReturn(0);
1699 }
1700 
1701 static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
1702 {
1703   PetscBool      iascii;
1704   PetscErrorCode ierr;
1705 
1706   PetscFunctionBegin;
1707   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1708   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1709   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1710   if (iascii) {ierr = PetscPartitionerView_PTScotch_Ascii(part, viewer);CHKERRQ(ierr);}
1711   PetscFunctionReturn(0);
1712 }
1713 
1714 static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1715 {
1716   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1717   const char *const         *slist = PTScotchStrategyList;
1718   PetscInt                  nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0]));
1719   PetscBool                 flag;
1720   PetscErrorCode            ierr;
1721 
1722   PetscFunctionBegin;
1723   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");CHKERRQ(ierr);
1724   ierr = PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);CHKERRQ(ierr);
1725   ierr = PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);CHKERRQ(ierr);
1726   ierr = PetscOptionsTail();CHKERRQ(ierr);
1727   PetscFunctionReturn(0);
1728 }
1729 
1730 static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1731 {
1732 #if defined(PETSC_HAVE_PTSCOTCH)
1733   MPI_Comm       comm       = PetscObjectComm((PetscObject)part);
1734   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1735   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1736   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1737   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1738   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1739   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1740   PetscInt       v, i, *assignment, *points;
1741   PetscMPIInt    size, rank, p;
1742   PetscErrorCode ierr;
1743 
1744   PetscFunctionBegin;
1745   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1746   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1747   ierr = PetscMalloc2(nparts+1,&vtxdist,PetscMax(nvtxs,1),&assignment);CHKERRQ(ierr);
1748 
1749   /* Calculate vertex distribution */
1750   vtxdist[0] = 0;
1751   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1752   for (p = 2; p <= size; ++p) {
1753     vtxdist[p] += vtxdist[p-1];
1754   }
1755 
1756   if (nparts == 1) {
1757     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1758   } else {
1759     PetscSection section;
1760     /* Weight cells by dofs on cell by default */
1761     ierr = PetscMalloc1(PetscMax(nvtxs,1),&vwgt);CHKERRQ(ierr);
1762     ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
1763     if (section) {
1764       PetscInt vStart, vEnd, dof;
1765       ierr = DMPlexGetHeightStratum(dm, 0, &vStart, &vEnd);CHKERRQ(ierr);
1766       for (v = vStart; v < vEnd; ++v) {
1767         ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1768         /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1769         /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1770         if (v-vStart < numVertices) vwgt[v-vStart] = PetscMax(dof, 1);
1771       }
1772     } else {
1773       for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1774     }
1775     {
1776       PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
1777       int                       strat = PTScotch_Strategy(pts->strategy);
1778       double                    imbal = (double)pts->imbalance;
1779 
1780       for (p = 0; !vtxdist[p+1] && p < size; ++p);
1781       if (vtxdist[p+1] == vtxdist[size]) {
1782         if (rank == p) {
1783           ierr = PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, assignment);CHKERRQ(ierr);
1784         }
1785       } else {
1786         ierr = PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, assignment, comm);CHKERRQ(ierr);
1787       }
1788     }
1789     ierr = PetscFree(vwgt);CHKERRQ(ierr);
1790   }
1791 
1792   /* Convert to PetscSection+IS */
1793   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1794   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1795   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1796   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1797   for (p = 0, i = 0; p < nparts; ++p) {
1798     for (v = 0; v < nvtxs; ++v) {
1799       if (assignment[v] == p) points[i++] = v;
1800     }
1801   }
1802   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1803   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1804 
1805   ierr = PetscFree2(vtxdist,assignment);CHKERRQ(ierr);
1806   PetscFunctionReturn(0);
1807 #else
1808   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
1809 #endif
1810 }
1811 
1812 static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
1813 {
1814   PetscFunctionBegin;
1815   part->ops->view           = PetscPartitionerView_PTScotch;
1816   part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
1817   part->ops->partition      = PetscPartitionerPartition_PTScotch;
1818   part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
1819   PetscFunctionReturn(0);
1820 }
1821 
1822 /*MC
1823   PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library
1824 
1825   Level: intermediate
1826 
1827 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1828 M*/
1829 
1830 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
1831 {
1832   PetscPartitioner_PTScotch *p;
1833   PetscErrorCode          ierr;
1834 
1835   PetscFunctionBegin;
1836   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1837   ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
1838   part->data = p;
1839 
1840   p->strategy  = 0;
1841   p->imbalance = 0.01;
1842 
1843   ierr = PetscPartitionerInitialize_PTScotch(part);CHKERRQ(ierr);
1844   ierr = PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);CHKERRQ(ierr);
1845   PetscFunctionReturn(0);
1846 }
1847 
1848 
1849 /*@
1850   DMPlexGetPartitioner - Get the mesh partitioner
1851 
1852   Not collective
1853 
1854   Input Parameter:
1855 . dm - The DM
1856 
1857   Output Parameter:
1858 . part - The PetscPartitioner
1859 
1860   Level: developer
1861 
1862   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1863 
.seealso: DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1865 @*/
1866 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1867 {
1868   DM_Plex       *mesh = (DM_Plex *) dm->data;
1869 
1870   PetscFunctionBegin;
1871   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1872   PetscValidPointer(part, 2);
1873   *part = mesh->partitioner;
1874   PetscFunctionReturn(0);
1875 }
1876 
1877 /*@
1878   DMPlexSetPartitioner - Set the mesh partitioner
1879 
1880   logically collective on dm and part
1881 
1882   Input Parameters:
1883 + dm - The DM
1884 - part - The partitioner
1885 
1886   Level: developer
1887 
1888   Note: Any existing PetscPartitioner will be destroyed.
1889 
.seealso: DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1891 @*/
1892 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1893 {
1894   DM_Plex       *mesh = (DM_Plex *) dm->data;
1895   PetscErrorCode ierr;
1896 
1897   PetscFunctionBegin;
1898   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1899   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1900   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1901   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1902   mesh->partitioner = part;
1903   PetscFunctionReturn(0);
1904 }
1905 
1906 static PetscErrorCode DMPlexAddClosure_Tree(DM dm, PetscHSetI ht, PetscInt point, PetscBool up, PetscBool down)
1907 {
1908   PetscErrorCode ierr;
1909 
1910   PetscFunctionBegin;
1911   if (up) {
1912     PetscInt parent;
1913 
1914     ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
1915     if (parent != point) {
1916       PetscInt closureSize, *closure = NULL, i;
1917 
1918       ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
1919       for (i = 0; i < closureSize; i++) {
1920         PetscInt cpoint = closure[2*i];
1921 
1922         ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
1923         ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
1924       }
1925       ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
1926     }
1927   }
1928   if (down) {
1929     PetscInt numChildren;
1930     const PetscInt *children;
1931 
1932     ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
1933     if (numChildren) {
1934       PetscInt i;
1935 
1936       for (i = 0; i < numChildren; i++) {
1937         PetscInt cpoint = children[i];
1938 
1939         ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
1940         ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
1941       }
1942     }
1943   }
1944   PetscFunctionReturn(0);
1945 }
1946 
1947 /*@
1948   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
1949 
1950   Input Parameters:
1951 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1953 
1954   Level: developer
1955 
1956 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1957 @*/
PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, c, closureSize;
  PetscInt       *closure = NULL;
  DM_Plex        *mesh    = (DM_Plex *)dm->data;
  /* Tree closure is only needed when the mesh actually has parent/child relations */
  PetscBool       hasTree = (mesh->parentSection || mesh->childSection) ? PETSC_TRUE : PETSC_FALSE;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);

  /* For each rank stratum, replace its point set by the union of the transitive
     closures of those points (deduplicated through a hash set) */
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];
    PetscHSetI     ht;
    PetscInt       nelems, *elems, off = 0;

    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
    /* Heuristic pre-sizing: assume ~16 closure points per labeled point */
    ierr = PetscHSetIResize(ht, numPoints*16);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      /* closure holds (point, orientation) pairs, hence the stride of 2 */
      for (c = 0; c < closureSize*2; c += 2) {
        ierr = PetscHSetIAdd(ht, closure[c]);CHKERRQ(ierr);
        if (hasTree) {ierr = DMPlexAddClosure_Tree(dm, ht, closure[c], PETSC_TRUE, PETSC_TRUE);CHKERRQ(ierr);}
      }
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
    /* Flatten and sort the hash set, then overwrite the stratum with the closed set */
    ierr = PetscHSetIGetSize(ht, &nelems);CHKERRQ(ierr);
    ierr = PetscMalloc1(nelems, &elems);CHKERRQ(ierr);
    ierr = PetscHSetIGetElems(ht, &off, elems);CHKERRQ(ierr);
    ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
    ierr = PetscSortInt(nelems, elems);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PETSC_COMM_SELF, nelems, elems, PETSC_OWN_POINTER, &pointIS);CHKERRQ(ierr);
    ierr = DMLabelSetStratumIS(label, rank, pointIS);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }

  /* Return the closure work array obtained from the last Get call */
  if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure);CHKERRQ(ierr);}
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2007 
2008 /*@
2009   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
2010 
2011   Input Parameters:
2012 + dm     - The DM
2013 - label  - DMLabel assinging ranks to remote roots
2014 
2015   Level: developer
2016 
2017 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2018 @*/
2019 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
2020 {
2021   IS              rankIS,   pointIS;
2022   const PetscInt *ranks,   *points;
2023   PetscInt        numRanks, numPoints, r, p, a, adjSize;
2024   PetscInt       *adj = NULL;
2025   PetscErrorCode  ierr;
2026 
2027   PetscFunctionBegin;
2028   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2029   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2030   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2031   for (r = 0; r < numRanks; ++r) {
2032     const PetscInt rank = ranks[r];
2033 
2034     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2035     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2036     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2037     for (p = 0; p < numPoints; ++p) {
2038       adjSize = PETSC_DETERMINE;
2039       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
2040       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
2041     }
2042     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2043     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2044   }
2045   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2046   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2047   ierr = PetscFree(adj);CHKERRQ(ierr);
2048   PetscFunctionReturn(0);
2049 }
2050 
2051 /*@
2052   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
2053 
2054   Input Parameters:
2055 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2057 
2058   Level: developer
2059 
2060   Note: This is required when generating multi-level overlaps to capture
2061   overlap points from non-neighbouring partitions.
2062 
2063 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2064 @*/
2065 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
2066 {
2067   MPI_Comm        comm;
2068   PetscMPIInt     rank;
2069   PetscSF         sfPoint;
2070   DMLabel         lblRoots, lblLeaves;
2071   IS              rankIS, pointIS;
2072   const PetscInt *ranks;
2073   PetscInt        numRanks, r;
2074   PetscErrorCode  ierr;
2075 
2076   PetscFunctionBegin;
2077   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2078   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2079   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
2080   /* Pull point contributions from remote leaves into local roots */
2081   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
2082   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
2083   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2084   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2085   for (r = 0; r < numRanks; ++r) {
2086     const PetscInt remoteRank = ranks[r];
2087     if (remoteRank == rank) continue;
2088     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
2089     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2090     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2091   }
2092   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2093   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2094   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
2095   /* Push point contributions from roots into remote leaves */
2096   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
2097   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
2098   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2099   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2100   for (r = 0; r < numRanks; ++r) {
2101     const PetscInt remoteRank = ranks[r];
2102     if (remoteRank == rank) continue;
2103     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
2104     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2105     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2106   }
2107   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2108   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2109   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
2110   PetscFunctionReturn(0);
2111 }
2112 
2113 /*@
2114   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
2115 
2116   Input Parameters:
+ dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots
2123 
2124   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
2125   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
2126 
2127   Level: developer
2128 
2129 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2130 @*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           p, n, numNeighbors, ssize, l, nleaves;
  PetscSF            sfPoint;
  PetscSFNode       *rootPoints, *leafPoints;
  PetscSection       rootSection, leafSection;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners */
  /* rootSection is chartered over ranks: one segment of (point, owner) pairs per target rank */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &ssize);CHKERRQ(ierr);
  ierr = PetscMalloc1(ssize, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* If the point is a leaf of the point SF, record its true remote owner;
         otherwise it is locally owned and recorded as (point, rank) */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap */
  /* Ship each rank its segment of (point, owner) pairs through processSF */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Filter remote contributions (ovLeafPoints) into the overlapSF */
  ierr = PetscSectionGetStorageSize(leafSection, &ssize);CHKERRQ(ierr);
  for (p = 0; p < ssize; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2200 
2201 /*@
2202   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
2203 
2204   Input Parameters:
+ dm    - The DM
- label - DMLabel assigning ranks to remote roots

  Output Parameter:
. sf    - The star forest communication context encapsulating the defined mapping
2210 
2211   Note: The incoming label is a receiver mapping of remote points to their parent rank.
2212 
2213   Level: developer
2214 
2215 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
2216 @*/
PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
{
  PetscMPIInt     rank, size;
  PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
  PetscSFNode    *remotePoints;
  IS              remoteRootIS;
  const PetscInt *remoteRoots;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);

  /* Total leaf count: one leaf per labeled (point, rank) pair across all strata */
  for (numRemote = 0, n = 0; n < size; ++n) {
    ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
    numRemote += numPoints;
  }
  ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
  /* Put owned points first */
  ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
  if (numPoints > 0) {
    ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
    ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    for (p = 0; p < numPoints; p++) {
      remotePoints[idx].index = remoteRoots[p];
      remotePoints[idx].rank = rank;
      idx++;
    }
    ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
  }
  /* Now add remote points */
  for (n = 0; n < size; ++n) {
    ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
    /* the local rank's stratum was already emitted above */
    if (numPoints <= 0 || n == rank) continue;
    ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
    ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    for (p = 0; p < numPoints; p++) {
      remotePoints[idx].index = remoteRoots[p];
      remotePoints[idx].rank = n;
      idx++;
    }
    ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
  }
  ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  /* NULL local indices = leaves are contiguous; the SF takes ownership of remotePoints */
  ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2267