xref: /petsc/src/dm/impls/plex/plexpartition.c (revision f051a2ed23442f31719a59b98ef74ee6cb7951be)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
/* Unique class id for PetscPartitioner objects; initialized to 0 here and
   presumably assigned during package/class registration — confirm in the package init code */
PetscClassId PETSCPARTITIONER_CLASSID = 0;

/* Global registry mapping partitioner type names to creation routines,
   plus the flag recording whether the built-in types have been registered */
PetscFunctionList PetscPartitionerList              = NULL;
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;

/* One-shot flag for the Chaco bibliography entry below (presumably consumed
   by PetscCitationsRegister() — confirm at call sites) */
PetscBool ChacoPartitionercite = PETSC_FALSE;
9 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
10                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
11                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
12                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
13                                "  isbn      = {0-89791-816-9},\n"
14                                "  pages     = {28},\n"
15                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
16                                "  publisher = {ACM Press},\n"
17                                "  address   = {New York},\n"
18                                "  year      = {1995}\n}\n";
19 
/* One-shot flag and BibTeX entry for the ParMetis bibliography citation
   (flag presumably consumed by PetscCitationsRegister() — confirm at call sites) */
PetscBool ParMetisPartitionercite = PETSC_FALSE;
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";
28 
29 #undef __FUNCT__
30 #define __FUNCT__ "DMPlexCreatePartitionerGraph"
31 /*@C
32   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
33 
34   Input Parameters:
35 + dm      - The mesh DM
36 - height  - Height of the strata from which to construct the graph
37 
38   Output Parameters:
39 + numVertices - Number of vertices in the graph
40 . offsets     - Point offsets in the graph
41 - adjacency   - Point connectivity in the graph
42 
43   The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
44   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
45   representation on the mesh.
46 
47   Level: developer
48 
49 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
50 @*/
51 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
52 {
53   PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
54   PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
55   IS             cellNumbering;
56   const PetscInt *cellNum;
57   PetscBool      useCone, useClosure;
58   PetscSection   section;
59   PetscSegBuffer adjBuffer;
60   PetscSF        sfPoint;
61   PetscMPIInt    rank;
62   PetscErrorCode ierr;
63 
64   PetscFunctionBegin;
65   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
66   ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
67   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
68   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
69   /* Build adjacency graph via a section/segbuffer */
70   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
71   ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
72   ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
73   /* Always use FVM adjacency to create partitioner graph */
74   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
75   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
76   ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
77   ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
78   ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);CHKERRQ(ierr);
79   ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
80   for (*numVertices = 0, p = pStart; p < pEnd; p++) {
81     /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
82     if (nroots > 0) {if (cellNum[p] < 0) continue;}
83     adjSize = PETSC_DETERMINE;
84     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
85     for (a = 0; a < adjSize; ++a) {
86       const PetscInt point = adj[a];
87       if (point != p && pStart <= point && point < pEnd) {
88         PetscInt *PETSC_RESTRICT pBuf;
89         ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
90         ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
91         *pBuf = point;
92       }
93     }
94     (*numVertices)++;
95   }
96   ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
97   ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
98   /* Derive CSR graph from section/segbuffer */
99   ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
100   ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
101   ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
102   for (idx = 0, p = pStart; p < pEnd; p++) {
103     if (nroots > 0) {if (cellNum[p] < 0) continue;}
104     ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
105   }
106   vOffsets[*numVertices] = size;
107   if (offsets) *offsets = vOffsets;
108   ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
109   {
110     ISLocalToGlobalMapping ltogCells;
111     PetscInt n, size, *cells_arr;
112     /* In parallel, apply a global cell numbering to the graph */
113     ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
114     ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
115     ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
116     ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
117     /* Convert to positive global cell numbers */
118     for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
119     ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
120     ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);CHKERRQ(ierr);
121     ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
122     ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
123   }
124   if (adjacency) *adjacency = graph;
125   /* Clean up */
126   ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
127   ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
128   ierr = PetscFree(adj);CHKERRQ(ierr);
129   PetscFunctionReturn(0);
130 }
131 
132 #undef __FUNCT__
133 #define __FUNCT__ "DMPlexCreateNeighborCSR"
134 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
135 {
136   const PetscInt maxFaceCases = 30;
137   PetscInt       numFaceCases = 0;
138   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
139   PetscInt      *off, *adj;
140   PetscInt      *neighborCells = NULL;
141   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
142   PetscErrorCode ierr;
143 
144   PetscFunctionBegin;
145   /* For parallel partitioning, I think you have to communicate supports */
146   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
147   cellDim = dim - cellHeight;
148   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
149   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
150   if (cEnd - cStart == 0) {
151     if (numVertices) *numVertices = 0;
152     if (offsets)   *offsets   = NULL;
153     if (adjacency) *adjacency = NULL;
154     PetscFunctionReturn(0);
155   }
156   numCells  = cEnd - cStart;
157   faceDepth = depth - cellHeight;
158   if (dim == depth) {
159     PetscInt f, fStart, fEnd;
160 
161     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
162     /* Count neighboring cells */
163     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
164     for (f = fStart; f < fEnd; ++f) {
165       const PetscInt *support;
166       PetscInt        supportSize;
167       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
168       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
169       if (supportSize == 2) {
170         PetscInt numChildren;
171 
172         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
173         if (!numChildren) {
174           ++off[support[0]-cStart+1];
175           ++off[support[1]-cStart+1];
176         }
177       }
178     }
179     /* Prefix sum */
180     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
181     if (adjacency) {
182       PetscInt *tmp;
183 
184       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
185       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
186       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
187       /* Get neighboring cells */
188       for (f = fStart; f < fEnd; ++f) {
189         const PetscInt *support;
190         PetscInt        supportSize;
191         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
192         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
193         if (supportSize == 2) {
194           PetscInt numChildren;
195 
196           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
197           if (!numChildren) {
198             adj[tmp[support[0]-cStart]++] = support[1];
199             adj[tmp[support[1]-cStart]++] = support[0];
200           }
201         }
202       }
203 #if defined(PETSC_USE_DEBUG)
204       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
205 #endif
206       ierr = PetscFree(tmp);CHKERRQ(ierr);
207     }
208     if (numVertices) *numVertices = numCells;
209     if (offsets)   *offsets   = off;
210     if (adjacency) *adjacency = adj;
211     PetscFunctionReturn(0);
212   }
213   /* Setup face recognition */
214   if (faceDepth == 1) {
215     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
216 
217     for (c = cStart; c < cEnd; ++c) {
218       PetscInt corners;
219 
220       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
221       if (!cornersSeen[corners]) {
222         PetscInt nFV;
223 
224         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
225         cornersSeen[corners] = 1;
226 
227         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
228 
229         numFaceVertices[numFaceCases++] = nFV;
230       }
231     }
232   }
233   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
234   /* Count neighboring cells */
235   for (cell = cStart; cell < cEnd; ++cell) {
236     PetscInt numNeighbors = PETSC_DETERMINE, n;
237 
238     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
239     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
240     for (n = 0; n < numNeighbors; ++n) {
241       PetscInt        cellPair[2];
242       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
243       PetscInt        meetSize = 0;
244       const PetscInt *meet    = NULL;
245 
246       cellPair[0] = cell; cellPair[1] = neighborCells[n];
247       if (cellPair[0] == cellPair[1]) continue;
248       if (!found) {
249         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
250         if (meetSize) {
251           PetscInt f;
252 
253           for (f = 0; f < numFaceCases; ++f) {
254             if (numFaceVertices[f] == meetSize) {
255               found = PETSC_TRUE;
256               break;
257             }
258           }
259         }
260         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
261       }
262       if (found) ++off[cell-cStart+1];
263     }
264   }
265   /* Prefix sum */
266   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
267 
268   if (adjacency) {
269     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
270     /* Get neighboring cells */
271     for (cell = cStart; cell < cEnd; ++cell) {
272       PetscInt numNeighbors = PETSC_DETERMINE, n;
273       PetscInt cellOffset   = 0;
274 
275       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
276       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
277       for (n = 0; n < numNeighbors; ++n) {
278         PetscInt        cellPair[2];
279         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
280         PetscInt        meetSize = 0;
281         const PetscInt *meet    = NULL;
282 
283         cellPair[0] = cell; cellPair[1] = neighborCells[n];
284         if (cellPair[0] == cellPair[1]) continue;
285         if (!found) {
286           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
287           if (meetSize) {
288             PetscInt f;
289 
290             for (f = 0; f < numFaceCases; ++f) {
291               if (numFaceVertices[f] == meetSize) {
292                 found = PETSC_TRUE;
293                 break;
294               }
295             }
296           }
297           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
298         }
299         if (found) {
300           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
301           ++cellOffset;
302         }
303       }
304     }
305   }
306   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
307   if (numVertices) *numVertices = numCells;
308   if (offsets)   *offsets   = off;
309   if (adjacency) *adjacency = adj;
310   PetscFunctionReturn(0);
311 }
312 
313 #undef __FUNCT__
314 #define __FUNCT__ "PetscPartitionerRegister"
315 /*@C
316   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
317 
318   Not Collective
319 
320   Input Parameters:
321 + name        - The name of a new user-defined creation routine
322 - create_func - The creation routine itself
323 
324   Notes:
325   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
326 
327   Sample usage:
328 .vb
329     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
330 .ve
331 
332   Then, your PetscPartitioner type can be chosen with the procedural interface via
333 .vb
334     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
335     PetscPartitionerSetType(PetscPartitioner, "my_part");
336 .ve
337    or at runtime via the option
338 .vb
339     -petscpartitioner_type my_part
340 .ve
341 
342   Level: advanced
343 
344 .keywords: PetscPartitioner, register
345 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
346 
347 @*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Record the creation routine under the given name in the global registry */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
356 
357 #undef __FUNCT__
358 #define __FUNCT__ "PetscPartitionerSetType"
359 /*@C
360   PetscPartitionerSetType - Builds a particular PetscPartitioner
361 
362   Collective on PetscPartitioner
363 
364   Input Parameters:
365 + part - The PetscPartitioner object
366 - name - The kind of partitioner
367 
368   Options Database Key:
369 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
370 
371   Level: intermediate
372 
373 .keywords: PetscPartitioner, set, type
374 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
375 @*/
376 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
377 {
378   PetscErrorCode (*r)(PetscPartitioner);
379   PetscBool      match;
380   PetscErrorCode ierr;
381 
382   PetscFunctionBegin;
383   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
384   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
385   if (match) PetscFunctionReturn(0);
386 
387   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
388   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
389   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
390 
391   if (part->ops->destroy) {
392     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
393     part->ops->destroy = NULL;
394   }
395   ierr = (*r)(part);CHKERRQ(ierr);
396   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
397   PetscFunctionReturn(0);
398 }
399 
400 #undef __FUNCT__
401 #define __FUNCT__ "PetscPartitionerGetType"
402 /*@C
403   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
404 
405   Not Collective
406 
407   Input Parameter:
408 . part - The PetscPartitioner
409 
410   Output Parameter:
411 . name - The PetscPartitioner type name
412 
413   Level: intermediate
414 
415 .keywords: PetscPartitioner, get, type, name
416 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
417 @*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  /* Returns a borrowed pointer to the object's type name; the caller must not free it */
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
429 
430 #undef __FUNCT__
431 #define __FUNCT__ "PetscPartitionerView"
432 /*@C
433   PetscPartitionerView - Views a PetscPartitioner
434 
435   Collective on PetscPartitioner
436 
437   Input Parameters:
438 + part - the PetscPartitioner object to view
439 - v    - the viewer
440 
441   Level: developer
442 
443 .seealso: PetscPartitionerDestroy()
444 @*/
PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Default to the communicator's ASCII stdout viewer when none is supplied */
  if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
  /* Dispatch to the implementation's view routine, if it provides one */
  if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
455 
456 #undef __FUNCT__
457 #define __FUNCT__ "PetscPartitionerSetTypeFromOptions_Internal"
458 PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
459 {
460   const char    *defaultType;
461   char           name[256];
462   PetscBool      flg;
463   PetscErrorCode ierr;
464 
465   PetscFunctionBegin;
466   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
467   if (!((PetscObject) part)->type_name)
468 #if defined(PETSC_HAVE_CHACO)
469     defaultType = PETSCPARTITIONERCHACO;
470 #elif defined(PETSC_HAVE_PARMETIS)
471     defaultType = PETSCPARTITIONERPARMETIS;
472 #else
473     defaultType = PETSCPARTITIONERSIMPLE;
474 #endif
475   else
476     defaultType = ((PetscObject) part)->type_name;
477   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
478 
479   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
480   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);CHKERRQ(ierr);
481   if (flg) {
482     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
483   } else if (!((PetscObject) part)->type_name) {
484     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
485   }
486   ierr = PetscOptionsEnd();CHKERRQ(ierr);
487   PetscFunctionReturn(0);
488 }
489 
490 #undef __FUNCT__
491 #define __FUNCT__ "PetscPartitionerSetFromOptions"
492 /*@
493   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
494 
495   Collective on PetscPartitioner
496 
497   Input Parameter:
498 . part - the PetscPartitioner object to set options for
499 
500   Level: developer
501 
502 .seealso: PetscPartitionerView()
503 @*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Resolve the type first so the type-specific setfromoptions below exists */
  ierr = PetscPartitionerSetTypeFromOptions_Internal(part);CHKERRQ(ierr);

  /* PetscOptionsObject used below is presumably declared by the PetscObjectOptionsBegin macro — confirm */
  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  if (part->ops->setfromoptions) {ierr = (*part->ops->setfromoptions)(part);CHKERRQ(ierr);}
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  /* Honor -petscpartitioner_view after all options are applied */
  ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
520 
521 #undef __FUNCT__
522 #define __FUNCT__ "PetscPartitionerSetUp"
523 /*@C
524   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
525 
526   Collective on PetscPartitioner
527 
528   Input Parameter:
529 . part - the PetscPartitioner object to setup
530 
531   Level: developer
532 
533 .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
534 @*/
PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Dispatch to the implementation's setup routine, if it provides one */
  if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
544 
545 #undef __FUNCT__
546 #define __FUNCT__ "PetscPartitionerDestroy"
547 /*@
548   PetscPartitionerDestroy - Destroys a PetscPartitioner object
549 
550   Collective on PetscPartitioner
551 
552   Input Parameter:
553 . part - the PetscPartitioner object to destroy
554 
555   Level: developer
556 
557 .seealso: PetscPartitionerView()
558 @*/
559 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
560 {
561   PetscErrorCode ierr;
562 
563   PetscFunctionBegin;
564   if (!*part) PetscFunctionReturn(0);
565   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
566 
567   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
568   ((PetscObject) (*part))->refct = 0;
569 
570   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
571   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
572   PetscFunctionReturn(0);
573 }
574 
575 #undef __FUNCT__
576 #define __FUNCT__ "PetscPartitionerCreate"
577 /*@
578   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
579 
580   Collective on MPI_Comm
581 
582   Input Parameter:
583 . comm - The communicator for the PetscPartitioner object
584 
585   Output Parameter:
586 . part - The PetscPartitioner object
587 
588   Level: beginner
589 
590 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
591 @*/
PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
{
  PetscPartitioner p;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidPointer(part, 2);
  *part = NULL;
  /* NOTE(review): the partitioner class appears to be registered by the FV package — confirm */
  ierr = PetscFVInitializePackage();CHKERRQ(ierr);

  /* Allocate and initialize the common PETSc object header; type is set later via PetscPartitionerSetType() */
  ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);

  *part = p;
  PetscFunctionReturn(0);
}
607 
608 #undef __FUNCT__
609 #define __FUNCT__ "PetscPartitionerPartition"
610 /*@
611   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
612 
613   Collective on DM
614 
615   Input Parameters:
616 + part    - The PetscPartitioner
617 - dm      - The mesh DM
618 
619   Output Parameters:
620 + partSection     - The PetscSection giving the division of points by partition
621 - partition       - The list of points by partition
622 
623   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
624 
625   Level: developer
626 
627 .seealso: DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
628 @*/
629 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
630 {
631   PetscMPIInt    size;
632   PetscErrorCode ierr;
633 
634   PetscFunctionBegin;
635   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
636   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
637   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
638   PetscValidPointer(partition, 5);
639   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
640   if (size == 1) {
641     PetscInt *points;
642     PetscInt  cStart, cEnd, c;
643 
644     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
645     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
646     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
647     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
648     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
649     for (c = cStart; c < cEnd; ++c) points[c] = c;
650     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
651     PetscFunctionReturn(0);
652   }
653   if (part->height == 0) {
654     PetscInt  numVertices;
655     PetscInt *start     = NULL;
656     PetscInt *adjacency = NULL;
657 
658     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency);CHKERRQ(ierr);
659     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
660     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
661     ierr = PetscFree(start);CHKERRQ(ierr);
662     ierr = PetscFree(adjacency);CHKERRQ(ierr);
663   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
664   PetscFunctionReturn(0);
665 }
666 
667 #undef __FUNCT__
668 #define __FUNCT__ "PetscPartitionerDestroy_Shell"
PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  /* Release the stored partition description, then the implementation struct itself */
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
680 
681 #undef __FUNCT__
682 #define __FUNCT__ "PetscPartitionerView_Shell_Ascii"
683 PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
684 {
685   PetscViewerFormat format;
686   PetscErrorCode    ierr;
687 
688   PetscFunctionBegin;
689   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
690   ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
691   PetscFunctionReturn(0);
692 }
693 
694 #undef __FUNCT__
695 #define __FUNCT__ "PetscPartitionerView_Shell"
696 PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
697 {
698   PetscBool      iascii;
699   PetscErrorCode ierr;
700 
701   PetscFunctionBegin;
702   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
703   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
704   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
705   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
706   PetscFunctionReturn(0);
707 }
708 
709 #undef __FUNCT__
710 #define __FUNCT__ "PetscPartitionerPartition_Shell"
711 PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
712 {
713   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
714   PetscInt                np;
715   PetscErrorCode          ierr;
716 
717   PetscFunctionBegin;
718   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
719   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
720   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
721   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
722   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
723   *partition = p->partition;
724   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
725   PetscFunctionReturn(0);
726 }
727 
728 #undef __FUNCT__
729 #define __FUNCT__ "PetscPartitionerInitialize_Shell"
PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
{
  PetscFunctionBegin;
  /* Install the shell implementation's virtual methods on the object */
  part->ops->view      = PetscPartitionerView_Shell;
  part->ops->destroy   = PetscPartitionerDestroy_Shell;
  part->ops->partition = PetscPartitionerPartition_Shell;
  PetscFunctionReturn(0);
}
738 
739 /*MC
740   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
741 
742   Level: intermediate
743 
744 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
745 M*/
746 
747 #undef __FUNCT__
748 #define __FUNCT__ "PetscPartitionerCreate_Shell"
PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
{
  PetscPartitioner_Shell *p;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Allocate the implementation struct and attach it to the object */
  ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
  part->data = p;

  /* Install the shell virtual methods */
  ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
762 
763 #undef __FUNCT__
764 #define __FUNCT__ "PetscPartitionerShellSetPartition"
765 /*@C
766   PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
767 
768   Collective on PART
769 
770   Input Parameters:
771 + part     - The PetscPartitioner
772 . numProcs - The number of partitions
773 . sizes    - array of size numProcs (or NULL) providing the number of points in each partition
774 - points   - array of size sum(sizes) (may be NULL iff sizes is NULL) providing the partition each point belongs to
775 
776   Level: developer
777 
778   Notes:
779 
780     It is safe to free the sizes and points arrays after use in this routine.
781 
782 .seealso: DMPlexDistribute(), PetscPartitionerCreate()
783 @*/
784 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt numProcs, const PetscInt sizes[], const PetscInt points[])
785 {
786   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
787   PetscInt                proc, numPoints;
788   PetscErrorCode          ierr;
789 
790   PetscFunctionBegin;
791   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
792   if (sizes) {PetscValidPointer(sizes, 3);}
793   if (sizes) {PetscValidPointer(points, 4);}
794   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
795   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
796   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
797   ierr = PetscSectionSetChart(p->section, 0, numProcs);CHKERRQ(ierr);
798   if (sizes) {
799     for (proc = 0; proc < numProcs; ++proc) {
800       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
801     }
802   }
803   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
804   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
805   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
806   PetscFunctionReturn(0);
807 }
808 
809 #undef __FUNCT__
810 #define __FUNCT__ "PetscPartitionerDestroy_Simple"
811 PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
812 {
813   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
814   PetscErrorCode          ierr;
815 
816   PetscFunctionBegin;
817   ierr = PetscFree(p);CHKERRQ(ierr);
818   PetscFunctionReturn(0);
819 }
820 
821 #undef __FUNCT__
822 #define __FUNCT__ "PetscPartitionerView_Simple_Ascii"
823 PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
824 {
825   PetscViewerFormat format;
826   PetscErrorCode    ierr;
827 
828   PetscFunctionBegin;
829   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
830   ierr = PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");CHKERRQ(ierr);
831   PetscFunctionReturn(0);
832 }
833 
834 #undef __FUNCT__
835 #define __FUNCT__ "PetscPartitionerView_Simple"
836 PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
837 {
838   PetscBool      iascii;
839   PetscErrorCode ierr;
840 
841   PetscFunctionBegin;
842   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
843   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
844   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
845   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
846   PetscFunctionReturn(0);
847 }
848 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerPartition_Simple"
/*
  Split the global vertex range into nparts contiguous, nearly equal chunks,
  ignoring the graph topology.  Each rank fills partSection with the number of
  its local vertices falling into each partition, and returns the identity
  ordering of its local vertices as the partition IS.
*/
PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)dm);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  /* Local points keep their order; the section alone encodes the partition sizes */
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial: the first (numVertices % nparts) partitions get one extra vertex */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  }
  else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Compute the global index range [myFirst, myLast] of this rank's vertices */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      PetscInt rem = nvGlobal % nparts;    /* number of "large" partitions */
      PetscInt pSmall = nvGlobal/nparts;   /* size of a small partition */
      PetscInt pBig = nvGlobal/nparts + 1; /* size of a large partition */


      /* Find the partitions [firstPart, lastPart] overlapping this rank's range;
         the rem large partitions come first, followed by the small ones */
      if (rem) {
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        }
        else {
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        }
        else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      }
      else {
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      /* Count how many of this rank's vertices fall into each overlapping partition */
      for (np = firstPart; np <= lastPart; np++) {
        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        /* Clip the partition's global range to this rank's range */
        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
924 
925 #undef __FUNCT__
926 #define __FUNCT__ "PetscPartitionerInitialize_Simple"
927 PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
928 {
929   PetscFunctionBegin;
930   part->ops->view      = PetscPartitionerView_Simple;
931   part->ops->destroy   = PetscPartitionerDestroy_Simple;
932   part->ops->partition = PetscPartitionerPartition_Simple;
933   PetscFunctionReturn(0);
934 }
935 
936 /*MC
937   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
938 
939   Level: intermediate
940 
941 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
942 M*/
943 
944 #undef __FUNCT__
945 #define __FUNCT__ "PetscPartitionerCreate_Simple"
946 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
947 {
948   PetscPartitioner_Simple *p;
949   PetscErrorCode           ierr;
950 
951   PetscFunctionBegin;
952   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
953   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
954   part->data = p;
955 
956   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
957   PetscFunctionReturn(0);
958 }
959 
960 #undef __FUNCT__
961 #define __FUNCT__ "PetscPartitionerDestroy_Gather"
962 PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
963 {
964   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
965   PetscErrorCode          ierr;
966 
967   PetscFunctionBegin;
968   ierr = PetscFree(p);CHKERRQ(ierr);
969   PetscFunctionReturn(0);
970 }
971 
972 #undef __FUNCT__
973 #define __FUNCT__ "PetscPartitionerView_Gather_Ascii"
974 PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
975 {
976   PetscViewerFormat format;
977   PetscErrorCode    ierr;
978 
979   PetscFunctionBegin;
980   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
981   ierr = PetscViewerASCIIPrintf(viewer, "Gather Graph Partitioner:\n");CHKERRQ(ierr);
982   PetscFunctionReturn(0);
983 }
984 
985 #undef __FUNCT__
986 #define __FUNCT__ "PetscPartitionerView_Gather"
987 PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
988 {
989   PetscBool      iascii;
990   PetscErrorCode ierr;
991 
992   PetscFunctionBegin;
993   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
994   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
995   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
996   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
997   PetscFunctionReturn(0);
998 }
999 
1000 #undef __FUNCT__
1001 #define __FUNCT__ "PetscPartitionerPartition_Gather"
1002 PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1003 {
1004   PetscInt       np;
1005   PetscErrorCode ierr;
1006 
1007   PetscFunctionBegin;
1008   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1009   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1010   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1011   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1012   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1013   PetscFunctionReturn(0);
1014 }
1015 
1016 #undef __FUNCT__
1017 #define __FUNCT__ "PetscPartitionerInitialize_Gather"
1018 PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1019 {
1020   PetscFunctionBegin;
1021   part->ops->view      = PetscPartitionerView_Gather;
1022   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1023   part->ops->partition = PetscPartitionerPartition_Gather;
1024   PetscFunctionReturn(0);
1025 }
1026 
1027 /*MC
1028   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1029 
1030   Level: intermediate
1031 
1032 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1033 M*/
1034 
1035 #undef __FUNCT__
1036 #define __FUNCT__ "PetscPartitionerCreate_Gather"
1037 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1038 {
1039   PetscPartitioner_Gather *p;
1040   PetscErrorCode           ierr;
1041 
1042   PetscFunctionBegin;
1043   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1044   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1045   part->data = p;
1046 
1047   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1048   PetscFunctionReturn(0);
1049 }
1050 
1051 
1052 #undef __FUNCT__
1053 #define __FUNCT__ "PetscPartitionerDestroy_Chaco"
1054 PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1055 {
1056   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1057   PetscErrorCode          ierr;
1058 
1059   PetscFunctionBegin;
1060   ierr = PetscFree(p);CHKERRQ(ierr);
1061   PetscFunctionReturn(0);
1062 }
1063 
1064 #undef __FUNCT__
1065 #define __FUNCT__ "PetscPartitionerView_Chaco_Ascii"
1066 PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1067 {
1068   PetscViewerFormat format;
1069   PetscErrorCode    ierr;
1070 
1071   PetscFunctionBegin;
1072   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1073   ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
1074   PetscFunctionReturn(0);
1075 }
1076 
1077 #undef __FUNCT__
1078 #define __FUNCT__ "PetscPartitionerView_Chaco"
1079 PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1080 {
1081   PetscBool      iascii;
1082   PetscErrorCode ierr;
1083 
1084   PetscFunctionBegin;
1085   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1086   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1087   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1088   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1089   PetscFunctionReturn(0);
1090 }
1091 
1092 #if defined(PETSC_HAVE_CHACO)
1093 #if defined(PETSC_HAVE_UNISTD_H)
1094 #include <unistd.h>
1095 #endif
1096 /* Chaco does not have an include file */
1097 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1098                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1099                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1100                        int mesh_dims[3], double *goal, int global_method, int local_method,
1101                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1102 
1103 extern int FREE_GRAPH;
1104 #endif
1105 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerPartition_Chaco"
/*
  Partition the local CSR graph (start, adjacency) into nparts pieces with the
  Chaco library, filling partSection with the number of points per partition and
  returning the points grouped by partition in the output IS.
*/
PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
  short int     *assignment;              /* Output partition */
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  /* Empty local graph: produce an empty section and IS, and return early */
  if (!numVertices) {
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco expects 1-based vertex numbering: shift in place; undone before returning */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  /* NOTE(review): the (int*) casts assume PetscInt is int; this would break with
     64-bit indices -- confirm the build configuration excludes that combination */
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    /* Restore stdout and capture Chaco's output for a possible error message */
    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    /* NOTE(review): interface()'s return value is only checked inside this
       UNISTD branch; without unistd.h a Chaco failure goes undetected -- confirm */
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* Group the vertices by assigned partition (quadratic scan, but simple) */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift applied above */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1211 
1212 #undef __FUNCT__
1213 #define __FUNCT__ "PetscPartitionerInitialize_Chaco"
1214 PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1215 {
1216   PetscFunctionBegin;
1217   part->ops->view      = PetscPartitionerView_Chaco;
1218   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1219   part->ops->partition = PetscPartitionerPartition_Chaco;
1220   PetscFunctionReturn(0);
1221 }
1222 
1223 /*MC
1224   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1225 
1226   Level: intermediate
1227 
1228 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1229 M*/
1230 
1231 #undef __FUNCT__
1232 #define __FUNCT__ "PetscPartitionerCreate_Chaco"
1233 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1234 {
1235   PetscPartitioner_Chaco *p;
1236   PetscErrorCode          ierr;
1237 
1238   PetscFunctionBegin;
1239   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1240   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1241   part->data = p;
1242 
1243   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1244   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1245   PetscFunctionReturn(0);
1246 }
1247 
1248 #undef __FUNCT__
1249 #define __FUNCT__ "PetscPartitionerDestroy_ParMetis"
1250 PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1251 {
1252   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1253   PetscErrorCode             ierr;
1254 
1255   PetscFunctionBegin;
1256   ierr = PetscFree(p);CHKERRQ(ierr);
1257   PetscFunctionReturn(0);
1258 }
1259 
1260 #undef __FUNCT__
1261 #define __FUNCT__ "PetscPartitionerView_ParMetis_Ascii"
1262 PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1263 {
1264   PetscViewerFormat format;
1265   PetscErrorCode    ierr;
1266 
1267   PetscFunctionBegin;
1268   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1269   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
1270   PetscFunctionReturn(0);
1271 }
1272 
1273 #undef __FUNCT__
1274 #define __FUNCT__ "PetscPartitionerView_ParMetis"
1275 PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1276 {
1277   PetscBool      iascii;
1278   PetscErrorCode ierr;
1279 
1280   PetscFunctionBegin;
1281   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1282   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1283   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1284   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1285   PetscFunctionReturn(0);
1286 }
1287 
1288 #if defined(PETSC_HAVE_PARMETIS)
1289 #include <parmetis.h>
1290 #endif
1291 
1292 #undef __FUNCT__
1293 #define __FUNCT__ "PetscPartitionerPartition_ParMetis"
1294 PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1295 {
1296 #if defined(PETSC_HAVE_PARMETIS)
1297   MPI_Comm       comm;
1298   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1299   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1300   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1301   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1302   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1303   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1304   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1305   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1306   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1307   PetscReal     *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1308   PetscReal     *ubvec;                    /* The balance intolerance for vertex weights */
1309   PetscInt       options[5];               /* Options */
1310   /* Outputs */
1311   PetscInt       edgeCut;                  /* The number of edges cut by the partition */
1312   PetscInt      *assignment, *points;
1313   PetscMPIInt    rank, p, v, i;
1314   PetscErrorCode ierr;
1315 
1316   PetscFunctionBegin;
1317   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1318   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1319   options[0] = 0; /* Use all defaults */
1320   /* Calculate vertex distribution */
1321   ierr = PetscMalloc4(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment);CHKERRQ(ierr);
1322   vtxdist[0] = 0;
1323   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1324   for (p = 2; p <= nparts; ++p) {
1325     vtxdist[p] += vtxdist[p-1];
1326   }
1327   /* Calculate weights */
1328   for (p = 0; p < nparts; ++p) {
1329     tpwgts[p] = 1.0/nparts;
1330   }
1331   ubvec[0] = 1.05;
1332 
1333   if (nparts == 1) {
1334     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1335   } else {
1336     if (vtxdist[1] == vtxdist[nparts]) {
1337       if (!rank) {
1338         PetscStackPush("METIS_PartGraphKway");
1339         ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
1340         PetscStackPop;
1341         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1342       }
1343     } else {
1344       PetscStackPush("ParMETIS_V3_PartKway");
1345       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1346       PetscStackPop;
1347       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
1348     }
1349   }
1350   /* Convert to PetscSection+IS */
1351   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1352   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1353   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1354   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1355   for (p = 0, i = 0; p < nparts; ++p) {
1356     for (v = 0; v < nvtxs; ++v) {
1357       if (assignment[v] == p) points[i++] = v;
1358     }
1359   }
1360   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1361   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1362   ierr = PetscFree4(vtxdist,tpwgts,ubvec,assignment);CHKERRQ(ierr);
1363   PetscFunctionReturn(0);
1364 #else
1365   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1366 #endif
1367 }
1368 
1369 #undef __FUNCT__
1370 #define __FUNCT__ "PetscPartitionerInitialize_ParMetis"
1371 PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1372 {
1373   PetscFunctionBegin;
1374   part->ops->view      = PetscPartitionerView_ParMetis;
1375   part->ops->destroy   = PetscPartitionerDestroy_ParMetis;
1376   part->ops->partition = PetscPartitionerPartition_ParMetis;
1377   PetscFunctionReturn(0);
1378 }
1379 
1380 /*MC
1381   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1382 
1383   Level: intermediate
1384 
1385 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1386 M*/
1387 
1388 #undef __FUNCT__
1389 #define __FUNCT__ "PetscPartitionerCreate_ParMetis"
1390 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1391 {
1392   PetscPartitioner_ParMetis *p;
1393   PetscErrorCode          ierr;
1394 
1395   PetscFunctionBegin;
1396   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1397   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1398   part->data = p;
1399 
1400   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1401   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1402   PetscFunctionReturn(0);
1403 }
1404 
1405 #undef __FUNCT__
1406 #define __FUNCT__ "DMPlexGetPartitioner"
1407 /*@
1408   DMPlexGetPartitioner - Get the mesh partitioner
1409 
1410   Not collective
1411 
1412   Input Parameter:
1413 . dm - The DM
1414 
1415   Output Parameter:
1416 . part - The PetscPartitioner
1417 
1418   Level: developer
1419 
1420   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1421 
1422 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1423 @*/
1424 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1425 {
1426   DM_Plex *mesh = (DM_Plex *) dm->data;
1427 
1428   PetscFunctionBegin;
1429   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1430   PetscValidPointer(part, 2);
1431   *part = mesh->partitioner;
1432   PetscFunctionReturn(0);
1433 }
1434 
1435 #undef __FUNCT__
1436 #define __FUNCT__ "DMPlexSetPartitioner"
1437 /*@
1438   DMPlexSetPartitioner - Set the mesh partitioner
1439 
1440   logically collective on dm and part
1441 
1442   Input Parameters:
1443 + dm - The DM
1444 - part - The partitioner
1445 
1446   Level: developer
1447 
1448   Note: Any existing PetscPartitioner will be destroyed.
1449 
1450 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1451 @*/
1452 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1453 {
1454   DM_Plex       *mesh = (DM_Plex *) dm->data;
1455   PetscErrorCode ierr;
1456 
1457   PetscFunctionBegin;
1458   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1459   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1460   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1461   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1462   mesh->partitioner = part;
1463   PetscFunctionReturn(0);
1464 }
1465 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelClosure_Tree"
/*
  Mark, with the given rank, the tree relatives of point in the label: when up is
  true, the closure of the tree parent (recursing through all ancestors); when
  down is true, all tree children (recursing through all descendants).  Used to
  keep partition labels consistent on non-conforming (tree) meshes.
*/
static PetscErrorCode DMPlexPartitionLabelClosure_Tree(DM dm, DMLabel label, PetscInt rank, PetscInt point, PetscBool up, PetscBool down)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (up) {
    PetscInt parent;

    ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
    /* A point with no parent is reported as its own parent */
    if (parent != point) {
      PetscInt closureSize, *closure = NULL, i;

      /* closure entries alternate (point, orientation); only the points are used */
      ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
      for (i = 0; i < closureSize; i++) {
        PetscInt cpoint = closure[2*i];

        ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
        /* Recurse upward only, avoiding re-descending from every ancestor */
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
      }
      ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
    }
  }
  if (down) {
    PetscInt numChildren;
    const PetscInt *children;

    ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
    if (numChildren) {
      PetscInt i;

      for (i = 0; i < numChildren; i++) {
        PetscInt cpoint = children[i];

        ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
        /* Recurse downward only */
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}
1508 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelClosure"
/*@
  DMPlexPartitionLabelClosure - Add the closure of all points to the partition label

  Input Parameters:
+ dm     - The DM
- label  - DMLabel assigning ranks to remote roots

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, c, closureSize;
  PetscInt       *closure = NULL;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* Iterate over every rank value currently present in the label */
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];

    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* The closure buffer is reused across iterations; entries alternate
         (point, orientation), so we step by 2 and read only the points */
      ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      for (c = 0; c < closureSize*2; c += 2) {
        ierr = DMLabelSetValue(label, closure[c], rank);CHKERRQ(ierr);
        /* Also pull in tree ancestors and descendants for non-conforming meshes */
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,closure[c],PETSC_TRUE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  /* Return the reused closure buffer; NOTE(review): point 0 looks like a
     placeholder accepted by Restore for buffer return -- confirm */
  if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);}
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1555 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelAdjacency"
/*@
  DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label

  Input Parameters:
+ dm     - The DM
- label  - DMLabel assigning ranks to remote roots

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, a, adjSize;
  PetscInt       *adj = NULL;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* Iterate over every rank value currently present in the label */
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];

    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* PETSC_DETERMINE lets DMPlexGetAdjacency size adj; the buffer is reused
         across iterations and freed once at the end */
      adjSize = PETSC_DETERMINE;
      ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
      /* Mark each adjacent point with the same rank */
      for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  ierr = PetscFree(adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1600 
1601 #undef __FUNCT__
1602 #define __FUNCT__ "DMPlexPartitionLabelPropagate"
1603 /*@
1604   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
1605 
1606   Input Parameters:
1607 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1609 
1610   Level: developer
1611 
1612   Note: This is required when generating multi-level overlaps to capture
1613   overlap points from non-neighbouring partitions.
1614 
1615 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1616 @*/
1617 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
1618 {
1619   MPI_Comm        comm;
1620   PetscMPIInt     rank;
1621   PetscSF         sfPoint;
1622   DMLabel         lblRoots, lblLeaves;
1623   IS              rankIS, pointIS;
1624   const PetscInt *ranks;
1625   PetscInt        numRanks, r;
1626   PetscErrorCode  ierr;
1627 
1628   PetscFunctionBegin;
1629   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1630   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1631   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1632   /* Pull point contributions from remote leaves into local roots */
1633   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
1634   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
1635   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1636   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1637   for (r = 0; r < numRanks; ++r) {
1638     const PetscInt remoteRank = ranks[r];
1639     if (remoteRank == rank) continue;
1640     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
1641     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1642     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1643   }
1644   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1645   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1646   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
1647   /* Push point contributions from roots into remote leaves */
1648   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
1649   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
1650   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1651   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1652   for (r = 0; r < numRanks; ++r) {
1653     const PetscInt remoteRank = ranks[r];
1654     if (remoteRank == rank) continue;
1655     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
1656     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1657     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1658   }
1659   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1660   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1661   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
1662   PetscFunctionReturn(0);
1663 }
1664 
1665 #undef __FUNCT__
1666 #define __FUNCT__ "DMPlexPartitionLabelInvert"
1667 /*@
1668   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
1669 
1670   Input Parameters:
1671 + dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots
1677 
1678   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
1679   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
1680 
1681   Level: developer
1682 
1683 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1684 @*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, numProcs;
  PetscInt           p, n, numNeighbors, size, l, nleaves;
  PetscSF            sfPoint;
  PetscSFNode       *rootPoints, *leafPoints;
  PetscSection       rootSection, leafSection;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners */
  /* rootSection is chartered over ranks: the dof at rank q is the number of
     local points that rootLabel marks for sending to q */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, numProcs);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(size, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  /* Pack, per neighbor rank, the (index, rank) pair of each point's true owner:
     if the point is a leaf of the point SF, substitute the remote root copy;
     otherwise the point is owned here, so record (point, rank).
     NOTE(review): PetscFindInt does a binary search, so this assumes the SF
     leaf array 'local' is sorted -- standard for point SFs, but confirm. */
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap: ship each per-rank pack through processSF as pairs of ints */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Record every received (remote root, owner rank) pair in leafLabel */
  ierr = PetscSectionGetStorageSize(leafSection, &size);CHKERRQ(ierr);
  for (p = 0; p < size; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1754 
1755 #undef __FUNCT__
1756 #define __FUNCT__ "DMPlexPartitionLabelCreateSF"
1757 /*@
1758   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
1759 
1760   Input Parameters:
1761 + dm    - The DM
- label - DMLabel assigning ranks to remote roots

  Output Parameter:
. sf    - The star forest communication context encapsulating the defined mapping
1766 
1767   Note: The incoming label is a receiver mapping of remote points to their parent rank.
1768 
1769   Level: developer
1770 
1771 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
1772 @*/
1773 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
1774 {
1775   PetscMPIInt     rank, numProcs;
1776   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
1777   PetscSFNode    *remotePoints;
1778   IS              remoteRootIS;
1779   const PetscInt *remoteRoots;
1780   PetscErrorCode ierr;
1781 
1782   PetscFunctionBegin;
1783   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1784   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
1785 
1786   for (numRemote = 0, n = 0; n < numProcs; ++n) {
1787     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1788     numRemote += numPoints;
1789   }
1790   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
1791   /* Put owned points first */
1792   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
1793   if (numPoints > 0) {
1794     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
1795     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1796     for (p = 0; p < numPoints; p++) {
1797       remotePoints[idx].index = remoteRoots[p];
1798       remotePoints[idx].rank = rank;
1799       idx++;
1800     }
1801     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1802     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1803   }
1804   /* Now add remote points */
1805   for (n = 0; n < numProcs; ++n) {
1806     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1807     if (numPoints <= 0 || n == rank) continue;
1808     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
1809     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1810     for (p = 0; p < numPoints; p++) {
1811       remotePoints[idx].index = remoteRoots[p];
1812       remotePoints[idx].rank = n;
1813       idx++;
1814     }
1815     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1816     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1817   }
1818   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
1819   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1820   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1821   PetscFunctionReturn(0);
1822 }
1823