xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 6e5f5dad97d5d0d453d1db100f9dbfcf5747de2b)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
3 PetscClassId PETSCPARTITIONER_CLASSID = 0;
4 
5 PetscFunctionList PetscPartitionerList              = NULL;
6 PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
7 
8 PetscBool ChacoPartitionercite = PETSC_FALSE;
9 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
10                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
11                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
12                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
13                                "  isbn      = {0-89791-816-9},\n"
14                                "  pages     = {28},\n"
15                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
16                                "  publisher = {ACM Press},\n"
17                                "  address   = {New York},\n"
18                                "  year      = {1995}\n}\n";
19 
20 PetscBool ParMetisPartitionercite = PETSC_FALSE;
21 const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
22                                "  author  = {George Karypis and Vipin Kumar},\n"
23                                "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
24                                "  journal = {Journal of Parallel and Distributed Computing},\n"
25                                "  volume  = {48},\n"
26                                "  pages   = {71--85},\n"
27                                "  year    = {1998}\n}\n";
28 
29 /*@C
30   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
31 
32   Input Parameters:
33 + dm      - The mesh DM
34 - height  - Height of the strata from which to construct the graph
35 
36   Output Parameters:
37 + numVertices     - Number of vertices in the graph
38 . offsets         - Point offsets in the graph
39 . adjacency       - Point connectivity in the graph
40 - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.
41 
42   The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
43   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
44   representation on the mesh.
45 
46   Level: developer
47 
48 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
49 @*/
50 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
51 {
52   PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
53   PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
54   IS             cellNumbering;
55   const PetscInt *cellNum;
56   PetscBool      useCone, useClosure;
57   PetscSection   section;
58   PetscSegBuffer adjBuffer;
59   PetscSF        sfPoint;
60   PetscMPIInt    rank;
61   PetscErrorCode ierr;
62 
63   PetscFunctionBegin;
64   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
65   ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
66   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
67   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
68   /* Build adjacency graph via a section/segbuffer */
69   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
70   ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
71   ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
72   /* Always use FVM adjacency to create partitioner graph */
73   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
74   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
75   ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
76   ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
77   ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);CHKERRQ(ierr);
78   if (globalNumbering) {
79     ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
80     *globalNumbering = cellNumbering;
81   }
82   ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
83   for (*numVertices = 0, p = pStart; p < pEnd; p++) {
84     /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
85     if (nroots > 0) {if (cellNum[p] < 0) continue;}
86     adjSize = PETSC_DETERMINE;
87     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
88     for (a = 0; a < adjSize; ++a) {
89       const PetscInt point = adj[a];
90       if (point != p && pStart <= point && point < pEnd) {
91         PetscInt *PETSC_RESTRICT pBuf;
92         ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
93         ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
94         *pBuf = point;
95       }
96     }
97     (*numVertices)++;
98   }
99   ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
100   ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
101   /* Derive CSR graph from section/segbuffer */
102   ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
103   ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
104   ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
105   for (idx = 0, p = pStart; p < pEnd; p++) {
106     if (nroots > 0) {if (cellNum[p] < 0) continue;}
107     ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
108   }
109   vOffsets[*numVertices] = size;
110   if (offsets) *offsets = vOffsets;
111   ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
112   {
113     ISLocalToGlobalMapping ltogCells;
114     PetscInt n, size, *cells_arr;
115     /* In parallel, apply a global cell numbering to the graph */
116     ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
117     ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
118     ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
119     ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
120     /* Convert to positive global cell numbers */
121     for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
122     ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
123     ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);CHKERRQ(ierr);
124     ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
125     ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
126   }
127   if (adjacency) *adjacency = graph;
128   /* Clean up */
129   ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
130   ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
131   ierr = PetscFree(adj);CHKERRQ(ierr);
132   PetscFunctionReturn(0);
133 }
134 
135 /*@C
136   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
137 
138   Collective
139 
140   Input Arguments:
141 + dm - The DMPlex
142 - cellHeight - The height of mesh points to treat as cells (default should be 0)
143 
144   Output Arguments:
145 + numVertices - The number of local vertices in the graph, or cells in the mesh.
146 . offsets     - The offset to the adjacency list for each cell
147 - adjacency   - The adjacency list for all cells
148 
149   Note: This is suitable for input to a mesh partitioner like ParMetis.
150 
151   Level: advanced
152 
153 .seealso: DMPlexCreate()
154 @*/
155 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
156 {
157   const PetscInt maxFaceCases = 30;
158   PetscInt       numFaceCases = 0;
159   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
160   PetscInt      *off, *adj;
161   PetscInt      *neighborCells = NULL;
162   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
163   PetscErrorCode ierr;
164 
165   PetscFunctionBegin;
166   /* For parallel partitioning, I think you have to communicate supports */
167   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
168   cellDim = dim - cellHeight;
169   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
170   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
171   if (cEnd - cStart == 0) {
172     if (numVertices) *numVertices = 0;
173     if (offsets)   *offsets   = NULL;
174     if (adjacency) *adjacency = NULL;
175     PetscFunctionReturn(0);
176   }
177   numCells  = cEnd - cStart;
178   faceDepth = depth - cellHeight;
179   if (dim == depth) {
180     PetscInt f, fStart, fEnd;
181 
182     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
183     /* Count neighboring cells */
184     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
185     for (f = fStart; f < fEnd; ++f) {
186       const PetscInt *support;
187       PetscInt        supportSize;
188       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
189       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
190       if (supportSize == 2) {
191         PetscInt numChildren;
192 
193         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
194         if (!numChildren) {
195           ++off[support[0]-cStart+1];
196           ++off[support[1]-cStart+1];
197         }
198       }
199     }
200     /* Prefix sum */
201     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
202     if (adjacency) {
203       PetscInt *tmp;
204 
205       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
206       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
207       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
208       /* Get neighboring cells */
209       for (f = fStart; f < fEnd; ++f) {
210         const PetscInt *support;
211         PetscInt        supportSize;
212         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
213         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
214         if (supportSize == 2) {
215           PetscInt numChildren;
216 
217           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
218           if (!numChildren) {
219             adj[tmp[support[0]-cStart]++] = support[1];
220             adj[tmp[support[1]-cStart]++] = support[0];
221           }
222         }
223       }
224 #if defined(PETSC_USE_DEBUG)
225       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
226 #endif
227       ierr = PetscFree(tmp);CHKERRQ(ierr);
228     }
229     if (numVertices) *numVertices = numCells;
230     if (offsets)   *offsets   = off;
231     if (adjacency) *adjacency = adj;
232     PetscFunctionReturn(0);
233   }
234   /* Setup face recognition */
235   if (faceDepth == 1) {
236     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
237 
238     for (c = cStart; c < cEnd; ++c) {
239       PetscInt corners;
240 
241       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
242       if (!cornersSeen[corners]) {
243         PetscInt nFV;
244 
245         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
246         cornersSeen[corners] = 1;
247 
248         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
249 
250         numFaceVertices[numFaceCases++] = nFV;
251       }
252     }
253   }
254   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
255   /* Count neighboring cells */
256   for (cell = cStart; cell < cEnd; ++cell) {
257     PetscInt numNeighbors = PETSC_DETERMINE, n;
258 
259     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
260     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
261     for (n = 0; n < numNeighbors; ++n) {
262       PetscInt        cellPair[2];
263       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
264       PetscInt        meetSize = 0;
265       const PetscInt *meet    = NULL;
266 
267       cellPair[0] = cell; cellPair[1] = neighborCells[n];
268       if (cellPair[0] == cellPair[1]) continue;
269       if (!found) {
270         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
271         if (meetSize) {
272           PetscInt f;
273 
274           for (f = 0; f < numFaceCases; ++f) {
275             if (numFaceVertices[f] == meetSize) {
276               found = PETSC_TRUE;
277               break;
278             }
279           }
280         }
281         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
282       }
283       if (found) ++off[cell-cStart+1];
284     }
285   }
286   /* Prefix sum */
287   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
288 
289   if (adjacency) {
290     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
291     /* Get neighboring cells */
292     for (cell = cStart; cell < cEnd; ++cell) {
293       PetscInt numNeighbors = PETSC_DETERMINE, n;
294       PetscInt cellOffset   = 0;
295 
296       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
297       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
298       for (n = 0; n < numNeighbors; ++n) {
299         PetscInt        cellPair[2];
300         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
301         PetscInt        meetSize = 0;
302         const PetscInt *meet    = NULL;
303 
304         cellPair[0] = cell; cellPair[1] = neighborCells[n];
305         if (cellPair[0] == cellPair[1]) continue;
306         if (!found) {
307           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
308           if (meetSize) {
309             PetscInt f;
310 
311             for (f = 0; f < numFaceCases; ++f) {
312               if (numFaceVertices[f] == meetSize) {
313                 found = PETSC_TRUE;
314                 break;
315               }
316             }
317           }
318           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
319         }
320         if (found) {
321           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
322           ++cellOffset;
323         }
324       }
325     }
326   }
327   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
328   if (numVertices) *numVertices = numCells;
329   if (offsets)   *offsets   = off;
330   if (adjacency) *adjacency = adj;
331   PetscFunctionReturn(0);
332 }
333 
334 /*@C
335   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
336 
337   Not Collective
338 
339   Input Parameters:
340 + name        - The name of a new user-defined creation routine
341 - create_func - The creation routine itself
342 
343   Notes:
344   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
345 
346   Sample usage:
347 .vb
348     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
349 .ve
350 
351   Then, your PetscPartitioner type can be chosen with the procedural interface via
352 .vb
353     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
354     PetscPartitionerSetType(PetscPartitioner, "my_part");
355 .ve
356    or at runtime via the option
357 .vb
358     -petscpartitioner_type my_part
359 .ve
360 
361   Level: advanced
362 
363 .keywords: PetscPartitioner, register
364 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
365 
366 @*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Add the (name, creation routine) pair to the global registry searched by PetscPartitionerSetType() */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
375 
376 /*@C
377   PetscPartitionerSetType - Builds a particular PetscPartitioner
378 
379   Collective on PetscPartitioner
380 
381   Input Parameters:
382 + part - The PetscPartitioner object
383 - name - The kind of partitioner
384 
385   Options Database Key:
386 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
387 
388   Level: intermediate
389 
390 .keywords: PetscPartitioner, set, type
391 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
392 @*/
393 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
394 {
395   PetscErrorCode (*r)(PetscPartitioner);
396   PetscBool      match;
397   PetscErrorCode ierr;
398 
399   PetscFunctionBegin;
400   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
401   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
402   if (match) PetscFunctionReturn(0);
403 
404   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
405   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
406   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
407 
408   if (part->ops->destroy) {
409     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
410   }
411   ierr = PetscMemzero(part->ops, sizeof(struct _PetscPartitionerOps));CHKERRQ(ierr);
412   ierr = (*r)(part);CHKERRQ(ierr);
413   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
414   PetscFunctionReturn(0);
415 }
416 
417 /*@C
418   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
419 
420   Not Collective
421 
422   Input Parameter:
423 . part - The PetscPartitioner
424 
425   Output Parameter:
426 . name - The PetscPartitioner type name
427 
428   Level: intermediate
429 
430 .keywords: PetscPartitioner, get, type, name
431 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
432 @*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  /* Make sure built-in types are registered; presumably a no-op after the first call -- see PetscPartitionerRegisterAllCalled */
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  /* Return a borrowed reference to the type name; the caller must not free it */
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
444 
445 /*@C
446   PetscPartitionerView - Views a PetscPartitioner
447 
448   Collective on PetscPartitioner
449 
450   Input Parameter:
451 + part - the PetscPartitioner object to view
452 - v    - the viewer
453 
454   Level: developer
455 
456 .seealso: PetscPartitionerDestroy()
457 @*/
PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Default to the standard ASCII viewer on the partitioner's communicator */
  if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
  /* Dispatch to the type-specific view routine, if one is installed */
  if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
468 
469 static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType)
470 {
471   PetscFunctionBegin;
472   if (!currentType) {
473 #if defined(PETSC_HAVE_CHACO)
474     *defaultType = PETSCPARTITIONERCHACO;
475 #elif defined(PETSC_HAVE_PARMETIS)
476     *defaultType = PETSCPARTITIONERPARMETIS;
477 #elif defined(PETSC_HAVE_PTSCOTCH)
478     *defaultType = PETSCPARTITIONERPTSCOTCH;
479 #else
480     *defaultType = PETSCPARTITIONERSIMPLE;
481 #endif
482   } else {
483     *defaultType = currentType;
484   }
485   PetscFunctionReturn(0);
486 }
487 
488 /*@
489   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
490 
491   Collective on PetscPartitioner
492 
493   Input Parameter:
494 . part - the PetscPartitioner object to set options for
495 
496   Level: developer
497 
498 .seealso: PetscPartitionerView()
499 @*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  const char    *defaultType;
  char           name[256];
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  /* The current type (if already set) is presented as the default in the options listing */
  ierr = PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);CHKERRQ(ierr);
  /* NOTE: PetscObjectOptionsBegin declares the PetscOptionsObject used below */
  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, sizeof(name), &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
  } else if (!((PetscObject) part)->type_name) {
    /* No type requested on the command line and none set yet: install the default */
    ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
  }
  /* Let the implementation process its own options */
  if (part->ops->setfromoptions) {
    ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);
  }
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
527 
528 /*@C
529   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
530 
531   Collective on PetscPartitioner
532 
533   Input Parameter:
534 . part - the PetscPartitioner object to setup
535 
536   Level: developer
537 
538 .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
539 @*/
PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Dispatch to the type-specific setup routine, if one is installed */
  if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
549 
550 /*@
551   PetscPartitionerDestroy - Destroys a PetscPartitioner object
552 
553   Collective on PetscPartitioner
554 
555   Input Parameter:
556 . part - the PetscPartitioner object to destroy
557 
558   Level: developer
559 
560 .seealso: PetscPartitionerView()
561 @*/
562 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
563 {
564   PetscErrorCode ierr;
565 
566   PetscFunctionBegin;
567   if (!*part) PetscFunctionReturn(0);
568   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
569 
570   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
571   ((PetscObject) (*part))->refct = 0;
572 
573   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
574   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
575   PetscFunctionReturn(0);
576 }
577 
578 /*@
579   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
580 
581   Collective on MPI_Comm
582 
583   Input Parameter:
584 . comm - The communicator for the PetscPartitioner object
585 
586   Output Parameter:
587 . part - The PetscPartitioner object
588 
589   Level: beginner
590 
591 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
592 @*/
PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
{
  PetscPartitioner p;
  const char       *partitionerType = NULL;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidPointer(part, 2);
  *part = NULL;
  ierr = DMInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
  /* Immediately give the object a concrete default type (NULL asks for the build-time default) */
  ierr = PetscPartitionerGetDefaultType(NULL,&partitionerType);CHKERRQ(ierr);
  ierr = PetscPartitionerSetType(p,partitionerType);CHKERRQ(ierr);

  *part = p;
  PetscFunctionReturn(0);
}
611 
612 /*@
613   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
614 
615   Collective on DM
616 
617   Input Parameters:
618 + part    - The PetscPartitioner
619 - dm      - The mesh DM
620 
621   Output Parameters:
622 + partSection     - The PetscSection giving the division of points by partition
623 - partition       - The list of points by partition
624 
625   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
626 
627   Level: developer
628 
629 .seealso: DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
630 @*/
631 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
632 {
633   PetscMPIInt    size;
634   PetscErrorCode ierr;
635 
636   PetscFunctionBegin;
637   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
638   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
639   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
640   PetscValidPointer(partition, 5);
641   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
642   if (size == 1) {
643     PetscInt *points;
644     PetscInt  cStart, cEnd, c;
645 
646     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
647     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
648     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
649     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
650     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
651     for (c = cStart; c < cEnd; ++c) points[c] = c;
652     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
653     PetscFunctionReturn(0);
654   }
655   if (part->height == 0) {
656     PetscInt numVertices;
657     PetscInt *start     = NULL;
658     PetscInt *adjacency = NULL;
659     IS       globalNumbering;
660 
661     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
662     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
663     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
664     ierr = PetscFree(start);CHKERRQ(ierr);
665     ierr = PetscFree(adjacency);CHKERRQ(ierr);
666     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
667       const PetscInt *globalNum;
668       const PetscInt *partIdx;
669       PetscInt *map, cStart, cEnd;
670       PetscInt *adjusted, i, localSize, offset;
671       IS    newPartition;
672 
673       ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
674       ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
675       ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
676       ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
677       ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
678       ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
679       for (i = cStart, offset = 0; i < cEnd; i++) {
680         if (globalNum[i - cStart] >= 0) map[offset++] = i;
681       }
682       for (i = 0; i < localSize; i++) {
683         adjusted[i] = map[partIdx[i]];
684       }
685       ierr = PetscFree(map);CHKERRQ(ierr);
686       ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
687       ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
688       ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
689       ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
690       ierr = ISDestroy(partition);CHKERRQ(ierr);
691       *partition = newPartition;
692     }
693   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
694   PetscFunctionReturn(0);
695 
696 }
697 
/* Free the data held by the shell partitioner context (stored section and partition IS) */
static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
709 
710 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
711 {
712   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
713   PetscViewerFormat       format;
714   PetscErrorCode          ierr;
715 
716   PetscFunctionBegin;
717   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
718   ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
719   if (p->random) {
720     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
721     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
722     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
723   }
724   PetscFunctionReturn(0);
725 }
726 
/* View dispatcher for the shell partitioner: currently only ASCII viewers are supported */
static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
  if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
739 
/* Process options for the shell partitioner: only -petscpartitioner_shell_random is supported */
static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner Shell Options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
751 
752 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
753 {
754   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
755   PetscInt                np;
756   PetscErrorCode          ierr;
757 
758   PetscFunctionBegin;
759   if (p->random) {
760     PetscRandom r;
761     PetscInt   *sizes, *points, v, p;
762     PetscMPIInt rank;
763 
764     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
765     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
766     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr);
767     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
768     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
769     for (v = 0; v < numVertices; ++v) {points[v] = v;}
770     for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);}
771     for (v = numVertices-1; v > 0; --v) {
772       PetscReal val;
773       PetscInt  w, tmp;
774 
775       ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));CHKERRQ(ierr);
776       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
777       w    = PetscFloorReal(val);
778       tmp       = points[v];
779       points[v] = points[w];
780       points[w] = tmp;
781     }
782     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
783     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
784     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
785   }
786   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
787   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
788   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
789   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
790   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
791   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
792   *partition = p->partition;
793   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
794   PetscFunctionReturn(0);
795 }
796 
797 static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
798 {
799   PetscFunctionBegin;
800   part->ops->view           = PetscPartitionerView_Shell;
801   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
802   part->ops->destroy        = PetscPartitionerDestroy_Shell;
803   part->ops->partition      = PetscPartitionerPartition_Shell;
804   PetscFunctionReturn(0);
805 }
806 
807 /*MC
  PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object that uses a partition supplied by the user via PetscPartitionerShellSetPartition(), or a random partition if requested
809 
810   Level: intermediate
811 
812 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
813 M*/
814 
815 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
816 {
817   PetscPartitioner_Shell *p;
818   PetscErrorCode          ierr;
819 
820   PetscFunctionBegin;
821   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
822   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
823   part->data = p;
824 
825   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
826   p->random = PETSC_FALSE;
827   PetscFunctionReturn(0);
828 }
829 
830 /*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
832 
833   Collective on Part
834 
835   Input Parameters:
836 + part     - The PetscPartitioner
837 . size - The number of partitions
838 . sizes    - array of size size (or NULL) providing the number of points in each partition
839 - points   - array of size sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)
840 
841   Level: developer
842 
843   Notes:
844 
845     It is safe to free the sizes and points arrays after use in this routine.
846 
847 .seealso DMPlexDistribute(), PetscPartitionerCreate()
848 @*/
849 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
850 {
851   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
852   PetscInt                proc, numPoints;
853   PetscErrorCode          ierr;
854 
855   PetscFunctionBegin;
856   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
857   if (sizes)  {PetscValidPointer(sizes, 3);}
858   if (points) {PetscValidPointer(points, 4);}
859   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
860   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
861   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
862   ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
863   if (sizes) {
864     for (proc = 0; proc < size; ++proc) {
865       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
866     }
867   }
868   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
869   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
870   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
871   PetscFunctionReturn(0);
872 }
873 
874 /*@
875   PetscPartitionerShellSetRandom - Set the flag to use a random partition
876 
877   Collective on Part
878 
879   Input Parameters:
880 + part   - The PetscPartitioner
881 - random - The flag to use a random partition
882 
883   Level: intermediate
884 
885 .seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
886 @*/
887 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
888 {
889   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
890 
891   PetscFunctionBegin;
892   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
893   p->random = random;
894   PetscFunctionReturn(0);
895 }
896 
897 /*@
898   PetscPartitionerShellGetRandom - get the flag to use a random partition
899 
900   Collective on Part
901 
902   Input Parameter:
903 . part   - The PetscPartitioner
904 
  Output Parameter:
906 . random - The flag to use a random partition
907 
908   Level: intermediate
909 
910 .seealso PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
911 @*/
912 PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
913 {
914   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
915 
916   PetscFunctionBegin;
917   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
918   PetscValidPointer(random, 2);
919   *random = p->random;
920   PetscFunctionReturn(0);
921 }
922 
923 static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
924 {
925   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
926   PetscErrorCode          ierr;
927 
928   PetscFunctionBegin;
929   ierr = PetscFree(p);CHKERRQ(ierr);
930   PetscFunctionReturn(0);
931 }
932 
933 static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
934 {
935   PetscViewerFormat format;
936   PetscErrorCode    ierr;
937 
938   PetscFunctionBegin;
939   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
940   ierr = PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");CHKERRQ(ierr);
941   PetscFunctionReturn(0);
942 }
943 
944 static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
945 {
946   PetscBool      iascii;
947   PetscErrorCode ierr;
948 
949   PetscFunctionBegin;
950   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
951   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
952   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
953   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
954   PetscFunctionReturn(0);
955 }
956 
/* Assign vertices to parts in contiguous, global-order chunks of (nearly) equal size.
   The returned local IS is the identity permutation; partSection records, for each part,
   how many of THIS process's vertices fall into that part. */
static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)dm);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  /* Local points are used in their existing order, so the permutation is the identity */
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial: part np gets numVertices/nparts vertices, plus one extra when np < numVertices % nparts */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  }
  else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Compute the global index range [myFirst, myLast] of this process's vertices
       via a prefix sum of all local counts */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      /* Globally, the first (nvGlobal % nparts) parts are "large" (pBig vertices) and the
         rest are "small" (pSmall vertices). Find which parts overlap [myFirst, myLast]. */
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      PetscInt rem = nvGlobal % nparts;
      PetscInt pSmall = nvGlobal/nparts;
      PetscInt pBig = nvGlobal/nparts + 1;


      if (rem) {
        /* Tentatively locate myFirst/myLast as if every part were large ... */
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        /* ... then correct when the index falls past the rem large parts into the small ones */
        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        }
        else {
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        }
        else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      }
      else {
        /* Even division: every part has exactly nvGlobal/nparts vertices */
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      /* Count how many of my vertices land in each overlapping part: intersect the
         part's global range [PartStart, PartEnd) with my range [myFirst, myLast+1) */
      for (np = firstPart; np <= lastPart; np++) {
        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1030 
1031 static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1032 {
1033   PetscFunctionBegin;
1034   part->ops->view      = PetscPartitionerView_Simple;
1035   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1036   part->ops->partition = PetscPartitionerPartition_Simple;
1037   PetscFunctionReturn(0);
1038 }
1039 
1040 /*MC
  PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object that divides the vertices into contiguous, evenly sized blocks
1042 
1043   Level: intermediate
1044 
1045 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1046 M*/
1047 
1048 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1049 {
1050   PetscPartitioner_Simple *p;
1051   PetscErrorCode           ierr;
1052 
1053   PetscFunctionBegin;
1054   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1055   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1056   part->data = p;
1057 
1058   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1059   PetscFunctionReturn(0);
1060 }
1061 
1062 static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1063 {
1064   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1065   PetscErrorCode          ierr;
1066 
1067   PetscFunctionBegin;
1068   ierr = PetscFree(p);CHKERRQ(ierr);
1069   PetscFunctionReturn(0);
1070 }
1071 
1072 static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
1073 {
1074   PetscViewerFormat format;
1075   PetscErrorCode    ierr;
1076 
1077   PetscFunctionBegin;
1078   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1079   ierr = PetscViewerASCIIPrintf(viewer, "Gather Graph Partitioner:\n");CHKERRQ(ierr);
1080   PetscFunctionReturn(0);
1081 }
1082 
1083 static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1084 {
1085   PetscBool      iascii;
1086   PetscErrorCode ierr;
1087 
1088   PetscFunctionBegin;
1089   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1090   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1091   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1092   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1093   PetscFunctionReturn(0);
1094 }
1095 
1096 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1097 {
1098   PetscInt       np;
1099   PetscErrorCode ierr;
1100 
1101   PetscFunctionBegin;
1102   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1103   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1104   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1105   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1106   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1107   PetscFunctionReturn(0);
1108 }
1109 
1110 static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1111 {
1112   PetscFunctionBegin;
1113   part->ops->view      = PetscPartitionerView_Gather;
1114   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1115   part->ops->partition = PetscPartitionerPartition_Gather;
1116   PetscFunctionReturn(0);
1117 }
1118 
1119 /*MC
  PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object that assigns every vertex to the first partition
1121 
1122   Level: intermediate
1123 
1124 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1125 M*/
1126 
1127 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1128 {
1129   PetscPartitioner_Gather *p;
1130   PetscErrorCode           ierr;
1131 
1132   PetscFunctionBegin;
1133   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1134   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1135   part->data = p;
1136 
1137   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1138   PetscFunctionReturn(0);
1139 }
1140 
1141 
1142 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1143 {
1144   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1145   PetscErrorCode          ierr;
1146 
1147   PetscFunctionBegin;
1148   ierr = PetscFree(p);CHKERRQ(ierr);
1149   PetscFunctionReturn(0);
1150 }
1151 
1152 static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1153 {
1154   PetscViewerFormat format;
1155   PetscErrorCode    ierr;
1156 
1157   PetscFunctionBegin;
1158   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1159   ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
1160   PetscFunctionReturn(0);
1161 }
1162 
1163 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1164 {
1165   PetscBool      iascii;
1166   PetscErrorCode ierr;
1167 
1168   PetscFunctionBegin;
1169   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1170   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1171   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1172   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1173   PetscFunctionReturn(0);
1174 }
1175 
1176 #if defined(PETSC_HAVE_CHACO)
1177 #if defined(PETSC_HAVE_UNISTD_H)
1178 #include <unistd.h>
1179 #endif
1180 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1181 #include <chaco.h>
1182 #else
1183 /* Older versions of Chaco do not have an include file */
1184 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1185                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1186                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1187                        int mesh_dims[3], double *goal, int global_method, int local_method,
1188                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1189 #endif
1190 extern int FREE_GRAPH;
1191 #endif
1192 
/* Partition the graph using the (serial) Chaco library. Chaco cannot handle a distributed
   graph, so in debug builds we verify that at most one process has a nonempty local graph. */
static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int           *assignment;              /* Output partition */
#else
  short int     *assignment;              /* Output partition */
#endif
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
#if defined (PETSC_USE_DEBUG)
  {
    /* Count how many processes hold vertices; more than one means the graph is distributed */
    int ival,isum;
    PetscBool distributed;

    ival = (numVertices > 0);
    ierr = MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);CHKERRQ(ierr);
    distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
    if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
  }
#endif
  if (!numVertices) {
    /* Nothing to partition locally: produce an empty section and IS */
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Shift the CSR column indices to 1-based numbering for Chaco (undone at the end) */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    /* Restore stdout and capture anything Chaco printed (reported on failure) */
    char msgLog[10000]; /* NOTE(review): fixed-size capture buffer; longer Chaco output is truncated */
    int  count;

    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    /* NOTE(review): on this error path 'assignment' leaks and 'adjacency' stays shifted by 1 */
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#else
  if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* Group the vertices of each part contiguously, in part order */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift applied above, restoring the caller's adjacency array */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1313 
1314 static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1315 {
1316   PetscFunctionBegin;
1317   part->ops->view      = PetscPartitionerView_Chaco;
1318   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1319   part->ops->partition = PetscPartitionerPartition_Chaco;
1320   PetscFunctionReturn(0);
1321 }
1322 
1323 /*MC
1324   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1325 
1326   Level: intermediate
1327 
1328 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1329 M*/
1330 
1331 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1332 {
1333   PetscPartitioner_Chaco *p;
1334   PetscErrorCode          ierr;
1335 
1336   PetscFunctionBegin;
1337   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1338   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1339   part->data = p;
1340 
1341   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1342   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1343   PetscFunctionReturn(0);
1344 }
1345 
1346 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1347 {
1348   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1349   PetscErrorCode             ierr;
1350 
1351   PetscFunctionBegin;
1352   ierr = PetscFree(p);CHKERRQ(ierr);
1353   PetscFunctionReturn(0);
1354 }
1355 
1356 static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1357 {
1358   PetscViewerFormat format;
1359   PetscErrorCode    ierr;
1360 
1361   PetscFunctionBegin;
1362   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1363   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
1364   PetscFunctionReturn(0);
1365 }
1366 
1367 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1368 {
1369   PetscBool      iascii;
1370   PetscErrorCode ierr;
1371 
1372   PetscFunctionBegin;
1373   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1374   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1375   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1376   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1377   PetscFunctionReturn(0);
1378 }
1379 
1380 static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1381 {
1382   static const char         *ptypes[] = {"kway", "rb"};
1383   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1384   PetscErrorCode            ierr;
1385 
1386   PetscFunctionBegin;
1387   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");CHKERRQ(ierr);
1388   ierr = PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);CHKERRQ(ierr);
1389   ierr = PetscOptionsTail();CHKERRQ(ierr);
1390   PetscFunctionReturn(0);
1391 }
1392 
1393 #if defined(PETSC_HAVE_PARMETIS)
1394 #include <parmetis.h>
1395 #endif
1396 
1397 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1398 {
1399 #if defined(PETSC_HAVE_PARMETIS)
1400   MPI_Comm       comm;
1401   PetscSection   section;
1402   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1403   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1404   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1405   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1406   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1407   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1408   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1409   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1410   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1411   real_t        *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1412   real_t        *ubvec;                    /* The balance intolerance for vertex weights */
1413   PetscInt       options[64];              /* Options */
1414   /* Outputs */
1415   PetscInt       edgeCut;                  /* The number of edges cut by the partition */
1416   PetscInt       v, i, *assignment, *points;
1417   PetscMPIInt    size, rank, p;
1418   PetscInt       metis_ptype = ((PetscPartitioner_ParMetis *) part->data)->ptype;
1419   PetscErrorCode ierr;
1420 
1421   PetscFunctionBegin;
1422   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1423   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1424   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1425   /* Calculate vertex distribution */
1426   ierr = PetscMalloc5(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
1427   vtxdist[0] = 0;
1428   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1429   for (p = 2; p <= size; ++p) {
1430     vtxdist[p] += vtxdist[p-1];
1431   }
1432   /* Calculate partition weights */
1433   for (p = 0; p < nparts; ++p) {
1434     tpwgts[p] = 1.0/nparts;
1435   }
1436   ubvec[0] = 1.05;
1437   /* Weight cells by dofs on cell by default */
1438   ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1439   if (section) {
1440     PetscInt cStart, cEnd, dof;
1441 
1442     ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
1443     for (v = cStart; v < cEnd; ++v) {
1444       ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1445       /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1446       /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1447       if (v-cStart < numVertices) vwgt[v-cStart] = PetscMax(dof, 1);
1448     }
1449   } else {
1450     for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1451   }
1452   wgtflag |= 2; /* have weights on graph vertices */
1453 
1454   if (nparts == 1) {
1455     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1456   } else {
1457     for (p = 0; !vtxdist[p+1] && p < size; ++p);
1458     if (vtxdist[p+1] == vtxdist[size]) {
1459       if (rank == p) {
1460         ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
1461         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
1462         if (metis_ptype == 1) {
1463           PetscStackPush("METIS_PartGraphRecursive");
1464           ierr = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &edgeCut, assignment);
1465           PetscStackPop;
1466           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
1467         } else {
1468           /*
1469            It would be nice to activate the two options below, but they would need some actual testing.
1470            - Turning on these options may exercise path of the METIS code that have bugs and may break production runs.
1471            - If CONTIG is set to 1, METIS will exit with error if the graph is disconnected, despite the manual saying the option is ignored in such case.
1472           */
1473           /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
1474           /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
1475           PetscStackPush("METIS_PartGraphKway");
1476           ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &edgeCut, assignment);
1477           PetscStackPop;
1478           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1479         }
1480       }
1481     } else {
1482       options[0] = 0; /* use all defaults */
1483       PetscStackPush("ParMETIS_V3_PartKway");
1484       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1485       PetscStackPop;
1486       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
1487     }
1488   }
1489   /* Convert to PetscSection+IS */
1490   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1491   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1492   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1493   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1494   for (p = 0, i = 0; p < nparts; ++p) {
1495     for (v = 0; v < nvtxs; ++v) {
1496       if (assignment[v] == p) points[i++] = v;
1497     }
1498   }
1499   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1500   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1501   ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
1502   PetscFunctionReturn(0);
1503 #else
1504   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1505 #endif
1506 }
1507 
1508 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1509 {
1510   PetscFunctionBegin;
1511   part->ops->view           = PetscPartitionerView_ParMetis;
1512   part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
1513   part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
1514   part->ops->partition      = PetscPartitionerPartition_ParMetis;
1515   PetscFunctionReturn(0);
1516 }
1517 
1518 /*MC
1519   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1520 
1521   Level: intermediate
1522 
1523 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1524 M*/
1525 
1526 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1527 {
1528   PetscPartitioner_ParMetis *p;
1529   PetscErrorCode          ierr;
1530 
1531   PetscFunctionBegin;
1532   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1533   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1534   part->data = p;
1535 
1536   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1537   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1538   PetscFunctionReturn(0);
1539 }
1540 
1541 
/* BibTeX citation for PT-Scotch, registered once by PetscPartitionerCreate_PTScotch() */
PetscBool PTScotchPartitionercite = PETSC_FALSE;
const char PTScotchPartitionerCitation[] =
  "@article{PTSCOTCH,\n"
  "  author  = {C. Chevalier and F. Pellegrini},\n"
  "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
  "  journal = {Parallel Computing},\n"
  "  volume  = {34},\n"
  "  number  = {6},\n"
  "  pages   = {318--331},\n"
  "  year    = {2008},\n"
  "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
  "}\n";
1554 
/* Implementation-specific data for the PT-Scotch partitioner */
typedef struct {
  PetscInt  strategy;  /* index into PTScotchStrategyList, mapped to a SCOTCH_STRAT* flag by PTScotch_Strategy() */
  PetscReal imbalance; /* load imbalance ratio passed to the Scotch strategy builders */
} PetscPartitioner_PTScotch;
1559 
/* Names shown for -petscpartitioner_ptscotch_strategy; the index of the chosen
   name is stored in PetscPartitioner_PTScotch.strategy */
static const char *const
PTScotchStrategyList[] = {
  "DEFAULT",
  "QUALITY",
  "SPEED",
  "BALANCE",
  "SAFETY",
  "SCALABILITY",
  "RECURSIVE",
  "REMAP"
};
1571 
1572 #if defined(PETSC_HAVE_PTSCOTCH)
1573 
1574 EXTERN_C_BEGIN
1575 #include <ptscotch.h>
1576 EXTERN_C_END
1577 
/* Convert a nonzero PT-Scotch return code into a PETSc library error */
#define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0)
1579 
1580 static int PTScotch_Strategy(PetscInt strategy)
1581 {
1582   switch (strategy) {
1583   case  0: return SCOTCH_STRATDEFAULT;
1584   case  1: return SCOTCH_STRATQUALITY;
1585   case  2: return SCOTCH_STRATSPEED;
1586   case  3: return SCOTCH_STRATBALANCE;
1587   case  4: return SCOTCH_STRATSAFETY;
1588   case  5: return SCOTCH_STRATSCALABILITY;
1589   case  6: return SCOTCH_STRATRECURSIVE;
1590   case  7: return SCOTCH_STRATREMAP;
1591   default: return SCOTCH_STRATDEFAULT;
1592   }
1593 }
1594 
1595 
1596 static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1597                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[])
1598 {
1599   SCOTCH_Graph   grafdat;
1600   SCOTCH_Strat   stradat;
1601   SCOTCH_Num     vertnbr = n;
1602   SCOTCH_Num     edgenbr = xadj[n];
1603   SCOTCH_Num*    velotab = vtxwgt;
1604   SCOTCH_Num*    edlotab = adjwgt;
1605   SCOTCH_Num     flagval = strategy;
1606   double         kbalval = imbalance;
1607   PetscErrorCode ierr;
1608 
1609   PetscFunctionBegin;
1610   {
1611     PetscBool flg = PETSC_TRUE;
1612     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
1613     if (!flg) velotab = NULL;
1614   }
1615   ierr = SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr);
1616   ierr = SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
1617   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1618   ierr = SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
1619 #if defined(PETSC_USE_DEBUG)
1620   ierr = SCOTCH_graphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1621 #endif
1622   ierr = SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
1623   SCOTCH_stratExit(&stradat);
1624   SCOTCH_graphExit(&grafdat);
1625   PetscFunctionReturn(0);
1626 }
1627 
1628 static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1629                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[], MPI_Comm comm)
1630 {
1631   PetscMPIInt     procglbnbr;
1632   PetscMPIInt     proclocnum;
1633   SCOTCH_Arch     archdat;
1634   SCOTCH_Dgraph   grafdat;
1635   SCOTCH_Dmapping mappdat;
1636   SCOTCH_Strat    stradat;
1637   SCOTCH_Num      vertlocnbr;
1638   SCOTCH_Num      edgelocnbr;
1639   SCOTCH_Num*     veloloctab = vtxwgt;
1640   SCOTCH_Num*     edloloctab = adjwgt;
1641   SCOTCH_Num      flagval = strategy;
1642   double          kbalval = imbalance;
1643   PetscErrorCode  ierr;
1644 
1645   PetscFunctionBegin;
1646   {
1647     PetscBool flg = PETSC_TRUE;
1648     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
1649     if (!flg) veloloctab = NULL;
1650   }
1651   ierr = MPI_Comm_size(comm, &procglbnbr);CHKERRQ(ierr);
1652   ierr = MPI_Comm_rank(comm, &proclocnum);CHKERRQ(ierr);
1653   vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
1654   edgelocnbr = xadj[vertlocnbr];
1655 
1656   ierr = SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr);
1657   ierr = SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr);
1658 #if defined(PETSC_USE_DEBUG)
1659   ierr = SCOTCH_dgraphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1660 #endif
1661   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1662   ierr = SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRQ(ierr);
1663   ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
1664   ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
1665   ierr = SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr);
1666   ierr = SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr);
1667   SCOTCH_dgraphMapExit(&grafdat, &mappdat);
1668   SCOTCH_archExit(&archdat);
1669   SCOTCH_stratExit(&stradat);
1670   SCOTCH_dgraphExit(&grafdat);
1671   PetscFunctionReturn(0);
1672 }
1673 
1674 #endif /* PETSC_HAVE_PTSCOTCH */
1675 
1676 static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
1677 {
1678   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1679   PetscErrorCode             ierr;
1680 
1681   PetscFunctionBegin;
1682   ierr = PetscFree(p);CHKERRQ(ierr);
1683   PetscFunctionReturn(0);
1684 }
1685 
1686 static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer)
1687 {
1688   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1689   PetscErrorCode            ierr;
1690 
1691   PetscFunctionBegin;
1692   ierr = PetscViewerASCIIPrintf(viewer, "PTScotch Graph Partitioner:\n");CHKERRQ(ierr);
1693   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1694   ierr = PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);CHKERRQ(ierr);
1695   ierr = PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n",(double)p->imbalance);CHKERRQ(ierr);
1696   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1697   PetscFunctionReturn(0);
1698 }
1699 
1700 static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
1701 {
1702   PetscBool      iascii;
1703   PetscErrorCode ierr;
1704 
1705   PetscFunctionBegin;
1706   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1707   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1708   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1709   if (iascii) {ierr = PetscPartitionerView_PTScotch_Ascii(part, viewer);CHKERRQ(ierr);}
1710   PetscFunctionReturn(0);
1711 }
1712 
1713 static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1714 {
1715   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1716   const char *const         *slist = PTScotchStrategyList;
1717   PetscInt                  nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0]));
1718   PetscBool                 flag;
1719   PetscErrorCode            ierr;
1720 
1721   PetscFunctionBegin;
1722   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");CHKERRQ(ierr);
1723   ierr = PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);CHKERRQ(ierr);
1724   ierr = PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);CHKERRQ(ierr);
1725   ierr = PetscOptionsTail();CHKERRQ(ierr);
1726   PetscFunctionReturn(0);
1727 }
1728 
1729 static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1730 {
1731 #if defined(PETSC_HAVE_PTSCOTCH)
1732   MPI_Comm       comm       = PetscObjectComm((PetscObject)part);
1733   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1734   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1735   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1736   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1737   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1738   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1739   PetscInt       v, i, *assignment, *points;
1740   PetscMPIInt    size, rank, p;
1741   PetscErrorCode ierr;
1742 
1743   PetscFunctionBegin;
1744   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1745   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1746   ierr = PetscMalloc2(nparts+1,&vtxdist,PetscMax(nvtxs,1),&assignment);CHKERRQ(ierr);
1747 
1748   /* Calculate vertex distribution */
1749   vtxdist[0] = 0;
1750   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1751   for (p = 2; p <= size; ++p) {
1752     vtxdist[p] += vtxdist[p-1];
1753   }
1754 
1755   if (nparts == 1) {
1756     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1757   } else {
1758     PetscSection section;
1759     /* Weight cells by dofs on cell by default */
1760     ierr = PetscMalloc1(PetscMax(nvtxs,1),&vwgt);CHKERRQ(ierr);
1761     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1762     if (section) {
1763       PetscInt vStart, vEnd, dof;
1764       ierr = DMPlexGetHeightStratum(dm, 0, &vStart, &vEnd);CHKERRQ(ierr);
1765       for (v = vStart; v < vEnd; ++v) {
1766         ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1767         /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1768         /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1769         if (v-vStart < numVertices) vwgt[v-vStart] = PetscMax(dof, 1);
1770       }
1771     } else {
1772       for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1773     }
1774     {
1775       PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
1776       int                       strat = PTScotch_Strategy(pts->strategy);
1777       double                    imbal = (double)pts->imbalance;
1778 
1779       for (p = 0; !vtxdist[p+1] && p < size; ++p);
1780       if (vtxdist[p+1] == vtxdist[size]) {
1781         if (rank == p) {
1782           ierr = PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, assignment);CHKERRQ(ierr);
1783         }
1784       } else {
1785         ierr = PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, assignment, comm);CHKERRQ(ierr);
1786       }
1787     }
1788     ierr = PetscFree(vwgt);CHKERRQ(ierr);
1789   }
1790 
1791   /* Convert to PetscSection+IS */
1792   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1793   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1794   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1795   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1796   for (p = 0, i = 0; p < nparts; ++p) {
1797     for (v = 0; v < nvtxs; ++v) {
1798       if (assignment[v] == p) points[i++] = v;
1799     }
1800   }
1801   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1802   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1803 
1804   ierr = PetscFree2(vtxdist,assignment);CHKERRQ(ierr);
1805   PetscFunctionReturn(0);
1806 #else
1807   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
1808 #endif
1809 }
1810 
1811 static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
1812 {
1813   PetscFunctionBegin;
1814   part->ops->view           = PetscPartitionerView_PTScotch;
1815   part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
1816   part->ops->partition      = PetscPartitionerPartition_PTScotch;
1817   part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
1818   PetscFunctionReturn(0);
1819 }
1820 
1821 /*MC
1822   PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library
1823 
1824   Level: intermediate
1825 
1826 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1827 M*/
1828 
1829 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
1830 {
1831   PetscPartitioner_PTScotch *p;
1832   PetscErrorCode          ierr;
1833 
1834   PetscFunctionBegin;
1835   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1836   ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
1837   part->data = p;
1838 
1839   p->strategy  = 0;
1840   p->imbalance = 0.01;
1841 
1842   ierr = PetscPartitionerInitialize_PTScotch(part);CHKERRQ(ierr);
1843   ierr = PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);CHKERRQ(ierr);
1844   PetscFunctionReturn(0);
1845 }
1846 
1847 
1848 /*@
1849   DMPlexGetPartitioner - Get the mesh partitioner
1850 
1851   Not collective
1852 
1853   Input Parameter:
1854 . dm - The DM
1855 
1856   Output Parameter:
1857 . part - The PetscPartitioner
1858 
1859   Level: developer
1860 
1861   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1862 
1863 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1864 @*/
1865 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1866 {
1867   DM_Plex       *mesh = (DM_Plex *) dm->data;
1868 
1869   PetscFunctionBegin;
1870   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1871   PetscValidPointer(part, 2);
1872   *part = mesh->partitioner;
1873   PetscFunctionReturn(0);
1874 }
1875 
1876 /*@
1877   DMPlexSetPartitioner - Set the mesh partitioner
1878 
1879   logically collective on dm and part
1880 
1881   Input Parameters:
1882 + dm - The DM
1883 - part - The partitioner
1884 
1885   Level: developer
1886 
1887   Note: Any existing PetscPartitioner will be destroyed.
1888 
1889 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1890 @*/
1891 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1892 {
1893   DM_Plex       *mesh = (DM_Plex *) dm->data;
1894   PetscErrorCode ierr;
1895 
1896   PetscFunctionBegin;
1897   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1898   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1899   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1900   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1901   mesh->partitioner = part;
1902   PetscFunctionReturn(0);
1903 }
1904 
1905 static PetscErrorCode DMPlexAddClosure_Tree(DM dm, PetscHashI ht, PetscInt point, PetscBool up, PetscBool down)
1906 {
1907   PetscErrorCode ierr;
1908 
1909   PetscFunctionBegin;
1910   if (up) {
1911     PetscInt parent;
1912 
1913     ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
1914     if (parent != point) {
1915       PetscInt closureSize, *closure = NULL, i;
1916 
1917       ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
1918       for (i = 0; i < closureSize; i++) {
1919         PetscInt cpoint = closure[2*i];
1920         PETSC_UNUSED PetscHashIIter iter, ret;
1921 
1922         PetscHashIPut(ht, cpoint, ret, iter);
1923         ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
1924       }
1925       ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
1926     }
1927   }
1928   if (down) {
1929     PetscInt numChildren;
1930     const PetscInt *children;
1931 
1932     ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
1933     if (numChildren) {
1934       PetscInt i;
1935 
1936       for (i = 0; i < numChildren; i++) {
1937         PetscInt cpoint = children[i];
1938         PETSC_UNUSED PetscHashIIter iter, ret;
1939 
1940         PetscHashIPut(ht, cpoint, ret, iter);
1941         ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
1942       }
1943     }
1944   }
1945   PetscFunctionReturn(0);
1946 }
1947 
1948 /*@
1949   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
1950 
1951   Input Parameters:
1952 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1954 
1955   Level: developer
1956 
1957 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1958 @*/
PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, c, closureSize;
  PetscInt       *closure = NULL;
  DM_Plex        *mesh    = (DM_Plex *)dm->data;
  /* Tree (non-conforming) points exist iff the mesh stores parent/child sections */
  PetscBool       hasTree = (mesh->parentSection || mesh->childSection) ? PETSC_TRUE : PETSC_FALSE;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* Iterate over every rank (label value) present in the label */
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);

  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];
    PetscHashI     ht;
    PETSC_UNUSED   PetscHashIIter iter, ret;
    PetscInt       nkeys, *keys, off = 0;

    /* Collect the transitive closure of every point in this rank's stratum
       into a hash set, so duplicates are removed */
    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    PetscHashICreate(ht);
    /* Pre-size the hash; 16 closure points per labeled point is a heuristic */
    PetscHashIResize(ht, numPoints*16);
    for (p = 0; p < numPoints; ++p) {
      ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      /* closure[] stores (point, orientation) pairs; step by 2 over the points */
      for (c = 0; c < closureSize*2; c += 2) {
        PetscHashIPut(ht, closure[c], ret, iter);
        /* Non-conforming meshes also need tree relatives of each closure point */
        if (hasTree) {ierr = DMPlexAddClosure_Tree(dm, ht, closure[c], PETSC_TRUE, PETSC_TRUE);CHKERRQ(ierr);}
      }
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
    /* Replace this rank's stratum with the sorted, de-duplicated closure */
    PetscHashISize(ht, nkeys);
    ierr = PetscMalloc1(nkeys, &keys);CHKERRQ(ierr);
    PetscHashIGetKeys(ht, &off, keys);
    PetscHashIDestroy(ht);
    ierr = PetscSortInt(nkeys, keys);CHKERRQ(ierr);
    /* The IS takes ownership of keys (PETSC_OWN_POINTER) */
    ierr = ISCreateGeneral(PETSC_COMM_SELF, nkeys, keys, PETSC_OWN_POINTER, &pointIS);CHKERRQ(ierr);
    ierr = DMLabelSetStratumIS(label, rank, pointIS);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }

  /* The closure work array is reused across iterations; return it once here */
  if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure);CHKERRQ(ierr);}
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2009 
2010 /*@
2011   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
2012 
2013   Input Parameters:
2014 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2016 
2017   Level: developer
2018 
2019 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2020 @*/
2021 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
2022 {
2023   IS              rankIS,   pointIS;
2024   const PetscInt *ranks,   *points;
2025   PetscInt        numRanks, numPoints, r, p, a, adjSize;
2026   PetscInt       *adj = NULL;
2027   PetscErrorCode  ierr;
2028 
2029   PetscFunctionBegin;
2030   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2031   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2032   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2033   for (r = 0; r < numRanks; ++r) {
2034     const PetscInt rank = ranks[r];
2035 
2036     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2037     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2038     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2039     for (p = 0; p < numPoints; ++p) {
2040       adjSize = PETSC_DETERMINE;
2041       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
2042       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
2043     }
2044     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2045     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2046   }
2047   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2048   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2049   ierr = PetscFree(adj);CHKERRQ(ierr);
2050   PetscFunctionReturn(0);
2051 }
2052 
2053 /*@
2054   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
2055 
2056   Input Parameters:
2057 + dm     - The DM
2058 - label  - DMLabel assinging ranks to remote roots
2059 
2060   Level: developer
2061 
2062   Note: This is required when generating multi-level overlaps to capture
2063   overlap points from non-neighbouring partitions.
2064 
2065 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2066 @*/
2067 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
2068 {
2069   MPI_Comm        comm;
2070   PetscMPIInt     rank;
2071   PetscSF         sfPoint;
2072   DMLabel         lblRoots, lblLeaves;
2073   IS              rankIS, pointIS;
2074   const PetscInt *ranks;
2075   PetscInt        numRanks, r;
2076   PetscErrorCode  ierr;
2077 
2078   PetscFunctionBegin;
2079   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2080   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2081   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
2082   /* Pull point contributions from remote leaves into local roots */
2083   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
2084   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
2085   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2086   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2087   for (r = 0; r < numRanks; ++r) {
2088     const PetscInt remoteRank = ranks[r];
2089     if (remoteRank == rank) continue;
2090     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
2091     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2092     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2093   }
2094   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2095   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2096   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
2097   /* Push point contributions from roots into remote leaves */
2098   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
2099   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
2100   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2101   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2102   for (r = 0; r < numRanks; ++r) {
2103     const PetscInt remoteRank = ranks[r];
2104     if (remoteRank == rank) continue;
2105     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
2106     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2107     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2108   }
2109   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2110   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2111   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
2112   PetscFunctionReturn(0);
2113 }
2114 
2115 /*@
2116   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
2117 
  Input Parameters:
+ dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots
2125 
2126   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
2127   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
2128 
2129   Level: developer
2130 
2131 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2132 @*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           p, n, numNeighbors, ssize, l, nleaves;
  PetscSF            sfPoint;
  PetscSFNode       *rootPoints, *leafPoints;
  PetscSection       rootSection, leafSection;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners */
  /* rootSection is chartered over ranks: dof of rank n = number of points
     rootLabel sends to rank n */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &ssize);CHKERRQ(ierr);
  ierr = PetscMalloc1(ssize, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* If the point is a leaf of the point SF, record its remote owner;
         otherwise the point is owned locally (note: PetscFindInt assumes
         the leaf array 'local' is sorted) */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap */
  /* Each (point, rank) pair is sent as MPIU_2INT over processSF */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Filter remote contributions (ovLeafPoints) into the overlapSF */
  ierr = PetscSectionGetStorageSize(leafSection, &ssize);CHKERRQ(ierr);
  for (p = 0; p < ssize; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2202 
2203 /*@
2204   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
2205 
2206   Input Parameters:
2207 + dm    - The DM
2208 . label - DMLabel assinging ranks to remote roots
2209 
2210   Output Parameter:
2211 - sf    - The star forest communication context encapsulating the defined mapping
2212 
2213   Note: The incoming label is a receiver mapping of remote points to their parent rank.
2214 
2215   Level: developer
2216 
2217 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
2218 @*/
2219 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
2220 {
2221   PetscMPIInt     rank, size;
2222   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
2223   PetscSFNode    *remotePoints;
2224   IS              remoteRootIS;
2225   const PetscInt *remoteRoots;
2226   PetscErrorCode ierr;
2227 
2228   PetscFunctionBegin;
2229   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
2230   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
2231 
2232   for (numRemote = 0, n = 0; n < size; ++n) {
2233     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
2234     numRemote += numPoints;
2235   }
2236   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
2237   /* Put owned points first */
2238   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
2239   if (numPoints > 0) {
2240     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
2241     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2242     for (p = 0; p < numPoints; p++) {
2243       remotePoints[idx].index = remoteRoots[p];
2244       remotePoints[idx].rank = rank;
2245       idx++;
2246     }
2247     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2248     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
2249   }
2250   /* Now add remote points */
2251   for (n = 0; n < size; ++n) {
2252     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
2253     if (numPoints <= 0 || n == rank) continue;
2254     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
2255     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2256     for (p = 0; p < numPoints; p++) {
2257       remotePoints[idx].index = remoteRoots[p];
2258       remotePoints[idx].rank = n;
2259       idx++;
2260     }
2261     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2262     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
2263   }
2264   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
2265   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
2266   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
2267   PetscFunctionReturn(0);
2268 }
2269