xref: /petsc/src/dm/impls/plex/plexpartition.c (revision e611a964e9853b74d61a56642fe9d06a6e51780f)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
/* Class id for PetscPartitioner objects */
PetscClassId PETSCPARTITIONER_CLASSID = 0;

/* Global registry of partitioner constructors, populated by PetscPartitionerRegister(),
   and the flag recording whether the built-in implementations have been registered */
PetscFunctionList PetscPartitionerList              = NULL;
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
7 
8 PetscBool ChacoPartitionercite = PETSC_FALSE;
9 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
10                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
11                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
12                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
13                                "  isbn      = {0-89791-816-9},\n"
14                                "  pages     = {28},\n"
15                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
16                                "  publisher = {ACM Press},\n"
17                                "  address   = {New York},\n"
18                                "  year      = {1995}\n}\n";
19 
/* Citation registration flag and BibTeX entry for the ParMetis partitioner package */
PetscBool ParMetisPartitionercite = PETSC_FALSE;
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";
28 
#undef __FUNCT__
#define __FUNCT__ "DMPlexCreatePartitionerGraph"
/*@C
  DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner

  Input Parameters:
+ dm      - The mesh DM
- height  - Height of the strata from which to construct the graph

  Output Parameters:
+ numVertices - Number of vertices in the graph
. offsets     - Point offsets in the graph
- adjacency   - Point connectivity in the graph

  Notes:
  The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
  DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: developer

.seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
@*/
PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
{
  PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
  PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
  IS             cellNumbering;
  const PetscInt *cellNum;
  PetscBool      useCone, useClosure;
  PetscSection   section;
  PetscSegBuffer adjBuffer;
  PetscSF        sfPoint;
  PetscMPIInt    rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* nroots > 0 signals a parallel point SF is present; used below to detect and skip unowned cells */
  ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
  /* Build adjacency graph via a section/segbuffer */
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
  /* Always use FVM adjacency to create partitioner graph; save the user's settings and restore them below */
  ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
  ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);CHKERRQ(ierr);
  ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  for (*numVertices = 0, p = pStart; p < pEnd; p++) {
    /* Skip non-owned cells in parallel (ParMetis expects no overlap); negative cellNum marks unowned */
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    adjSize = PETSC_DETERMINE;
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      const PetscInt point = adj[a];
      /* Record only neighbors in the same stratum, excluding the point itself */
      if (point != p && pStart <= point && point < pEnd) {
        PetscInt *PETSC_RESTRICT pBuf;
        ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
        ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
        *pBuf = point;
      }
    }
    (*numVertices)++;
  }
  /* Restore the user's adjacency settings */
  ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
  /* Derive CSR graph from section/segbuffer */
  ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
  for (idx = 0, p = pStart; p < pEnd; p++) {
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
  }
  /* Closing offset: total number of edges stored */
  vOffsets[*numVertices] = size;
  if (offsets) *offsets = vOffsets;
  ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
  {
    ISLocalToGlobalMapping ltogCells;
    PetscInt n, size, *cells_arr;
    /* In parallel, apply a global cell numbering to the graph */
    ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
    /* Convert to positive global cell numbers: unowned cells are encoded as -(global+1) */
    for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
    ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
    ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
  }
  if (adjacency) *adjacency = graph;
  /* Clean up */
  ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
  ierr = PetscFree(adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
131 
132 #undef __FUNCT__
133 #define __FUNCT__ "DMPlexCreateNeighborCSR"
134 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
135 {
136   const PetscInt maxFaceCases = 30;
137   PetscInt       numFaceCases = 0;
138   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
139   PetscInt      *off, *adj;
140   PetscInt      *neighborCells = NULL;
141   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
142   PetscErrorCode ierr;
143 
144   PetscFunctionBegin;
145   /* For parallel partitioning, I think you have to communicate supports */
146   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
147   cellDim = dim - cellHeight;
148   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
149   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
150   if (cEnd - cStart == 0) {
151     if (numVertices) *numVertices = 0;
152     if (offsets)   *offsets   = NULL;
153     if (adjacency) *adjacency = NULL;
154     PetscFunctionReturn(0);
155   }
156   numCells  = cEnd - cStart;
157   faceDepth = depth - cellHeight;
158   if (dim == depth) {
159     PetscInt f, fStart, fEnd;
160 
161     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
162     /* Count neighboring cells */
163     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
164     for (f = fStart; f < fEnd; ++f) {
165       const PetscInt *support;
166       PetscInt        supportSize;
167       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
168       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
169       if (supportSize == 2) {
170         PetscInt numChildren;
171 
172         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
173         if (!numChildren) {
174           ++off[support[0]-cStart+1];
175           ++off[support[1]-cStart+1];
176         }
177       }
178     }
179     /* Prefix sum */
180     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
181     if (adjacency) {
182       PetscInt *tmp;
183 
184       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
185       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
186       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
187       /* Get neighboring cells */
188       for (f = fStart; f < fEnd; ++f) {
189         const PetscInt *support;
190         PetscInt        supportSize;
191         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
192         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
193         if (supportSize == 2) {
194           PetscInt numChildren;
195 
196           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
197           if (!numChildren) {
198             adj[tmp[support[0]-cStart]++] = support[1];
199             adj[tmp[support[1]-cStart]++] = support[0];
200           }
201         }
202       }
203 #if defined(PETSC_USE_DEBUG)
204       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
205 #endif
206       ierr = PetscFree(tmp);CHKERRQ(ierr);
207     }
208     if (numVertices) *numVertices = numCells;
209     if (offsets)   *offsets   = off;
210     if (adjacency) *adjacency = adj;
211     PetscFunctionReturn(0);
212   }
213   /* Setup face recognition */
214   if (faceDepth == 1) {
215     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
216 
217     for (c = cStart; c < cEnd; ++c) {
218       PetscInt corners;
219 
220       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
221       if (!cornersSeen[corners]) {
222         PetscInt nFV;
223 
224         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
225         cornersSeen[corners] = 1;
226 
227         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
228 
229         numFaceVertices[numFaceCases++] = nFV;
230       }
231     }
232   }
233   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
234   /* Count neighboring cells */
235   for (cell = cStart; cell < cEnd; ++cell) {
236     PetscInt numNeighbors = PETSC_DETERMINE, n;
237 
238     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
239     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
240     for (n = 0; n < numNeighbors; ++n) {
241       PetscInt        cellPair[2];
242       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
243       PetscInt        meetSize = 0;
244       const PetscInt *meet    = NULL;
245 
246       cellPair[0] = cell; cellPair[1] = neighborCells[n];
247       if (cellPair[0] == cellPair[1]) continue;
248       if (!found) {
249         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
250         if (meetSize) {
251           PetscInt f;
252 
253           for (f = 0; f < numFaceCases; ++f) {
254             if (numFaceVertices[f] == meetSize) {
255               found = PETSC_TRUE;
256               break;
257             }
258           }
259         }
260         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
261       }
262       if (found) ++off[cell-cStart+1];
263     }
264   }
265   /* Prefix sum */
266   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
267 
268   if (adjacency) {
269     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
270     /* Get neighboring cells */
271     for (cell = cStart; cell < cEnd; ++cell) {
272       PetscInt numNeighbors = PETSC_DETERMINE, n;
273       PetscInt cellOffset   = 0;
274 
275       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
276       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
277       for (n = 0; n < numNeighbors; ++n) {
278         PetscInt        cellPair[2];
279         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
280         PetscInt        meetSize = 0;
281         const PetscInt *meet    = NULL;
282 
283         cellPair[0] = cell; cellPair[1] = neighborCells[n];
284         if (cellPair[0] == cellPair[1]) continue;
285         if (!found) {
286           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
287           if (meetSize) {
288             PetscInt f;
289 
290             for (f = 0; f < numFaceCases; ++f) {
291               if (numFaceVertices[f] == meetSize) {
292                 found = PETSC_TRUE;
293                 break;
294               }
295             }
296           }
297           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
298         }
299         if (found) {
300           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
301           ++cellOffset;
302         }
303       }
304     }
305   }
306   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
307   if (numVertices) *numVertices = numCells;
308   if (offsets)   *offsets   = off;
309   if (adjacency) *adjacency = adj;
310   PetscFunctionReturn(0);
311 }
312 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerRegister"
/*@C
  PetscPartitionerRegister - Adds a new PetscPartitioner implementation

  Not Collective

  Input Parameters:
+ sname    - The name of a new user-defined creation routine
- function - The creation routine itself

  Notes:
  PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners

  Sample usage:
.vb
    PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
.ve

  Then, your PetscPartitioner type can be chosen with the procedural interface via
.vb
    PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
    PetscPartitionerSetType(PetscPartitioner, "my_part");
.ve
   or at runtime via the option
.vb
    -petscpartitioner_type my_part
.ve

  Level: advanced

.keywords: PetscPartitioner, register
.seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()

@*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Append the (name, constructor) pair to the global list consulted by PetscPartitionerSetType() */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
356 
357 #undef __FUNCT__
358 #define __FUNCT__ "PetscPartitionerSetType"
359 /*@C
360   PetscPartitionerSetType - Builds a particular PetscPartitioner
361 
362   Collective on PetscPartitioner
363 
364   Input Parameters:
365 + part - The PetscPartitioner object
366 - name - The kind of partitioner
367 
368   Options Database Key:
369 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
370 
371   Level: intermediate
372 
373 .keywords: PetscPartitioner, set, type
374 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
375 @*/
376 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
377 {
378   PetscErrorCode (*r)(PetscPartitioner);
379   PetscBool      match;
380   PetscErrorCode ierr;
381 
382   PetscFunctionBegin;
383   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
384   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
385   if (match) PetscFunctionReturn(0);
386 
387   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
388   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
389   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
390 
391   if (part->ops->destroy) {
392     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
393     part->ops->destroy = NULL;
394   }
395   ierr = (*r)(part);CHKERRQ(ierr);
396   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
397   PetscFunctionReturn(0);
398 }
399 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerGetType"
/*@C
  PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.

  Not Collective

  Input Parameter:
. part - The PetscPartitioner

  Output Parameter:
. name - The PetscPartitioner type name (NULL if no type has been set)

  Level: intermediate

.keywords: PetscPartitioner, get, type, name
.seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  /* Return a borrowed reference to the object's type name; the caller must not free it */
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
429 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerView"
/*@C
  PetscPartitionerView - Views a PetscPartitioner

  Collective on PetscPartitioner

  Input Parameters:
+ part - the PetscPartitioner object to view
- v    - the viewer (if NULL, the default ASCII viewer on the object's communicator is used)

  Level: developer

.seealso: PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Fall back to stdout on the object's communicator when no viewer is given */
  if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
  /* Delegate to the implementation's view routine, if any */
  if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
455 
456 #undef __FUNCT__
457 #define __FUNCT__ "PetscPartitionerSetTypeFromOptions_Internal"
458 PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
459 {
460   const char    *defaultType;
461   char           name[256];
462   PetscBool      flg;
463   PetscErrorCode ierr;
464 
465   PetscFunctionBegin;
466   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
467   if (!((PetscObject) part)->type_name) defaultType = PETSCPARTITIONERCHACO;
468   else                                  defaultType = ((PetscObject) part)->type_name;
469   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
470 
471   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
472   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);CHKERRQ(ierr);
473   if (flg) {
474     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
475   } else if (!((PetscObject) part)->type_name) {
476     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
477   }
478   ierr = PetscOptionsEnd();CHKERRQ(ierr);
479   PetscFunctionReturn(0);
480 }
481 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerSetFromOptions"
/*@
  PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to set options for

  Level: developer

.seealso: PetscPartitionerView()
@*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Select the type first so the implementation's own option handler below exists */
  ierr = PetscPartitionerSetTypeFromOptions_Internal(part);CHKERRQ(ierr);

  /* NOTE(review): PetscOptionsObject appears to be declared by the PetscObjectOptionsBegin macro — confirm */
  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  if (part->ops->setfromoptions) {ierr = (*part->ops->setfromoptions)(part);CHKERRQ(ierr);}
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
512 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerSetUp"
/*@C
  PetscPartitionerSetUp - Construct data structures for the PetscPartitioner

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to setup

  Level: developer

.seealso: PetscPartitionerView(), PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Delegate to the implementation's setup routine, if any */
  if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
536 
537 #undef __FUNCT__
538 #define __FUNCT__ "PetscPartitionerDestroy"
539 /*@
540   PetscPartitionerDestroy - Destroys a PetscPartitioner object
541 
542   Collective on PetscPartitioner
543 
544   Input Parameter:
545 . part - the PetscPartitioner object to destroy
546 
547   Level: developer
548 
549 .seealso: PetscPartitionerView()
550 @*/
551 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
552 {
553   PetscErrorCode ierr;
554 
555   PetscFunctionBegin;
556   if (!*part) PetscFunctionReturn(0);
557   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
558 
559   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
560   ((PetscObject) (*part))->refct = 0;
561 
562   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
563   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
564   PetscFunctionReturn(0);
565 }
566 
567 #undef __FUNCT__
568 #define __FUNCT__ "PetscPartitionerCreate"
569 /*@
570   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
571 
572   Collective on MPI_Comm
573 
574   Input Parameter:
575 . comm - The communicator for the PetscPartitioner object
576 
577   Output Parameter:
578 . part - The PetscPartitioner object
579 
580   Level: beginner
581 
582 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
583 @*/
584 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
585 {
586   PetscPartitioner p;
587   PetscErrorCode   ierr;
588 
589   PetscFunctionBegin;
590   PetscValidPointer(part, 2);
591   *part = NULL;
592   ierr = PetscFVInitializePackage();CHKERRQ(ierr);
593 
594   ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
595 
596   *part = p;
597   PetscFunctionReturn(0);
598 }
599 
600 #undef __FUNCT__
601 #define __FUNCT__ "PetscPartitionerPartition"
602 /*@
603   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
604 
605   Collective on DM
606 
607   Input Parameters:
608 + part    - The PetscPartitioner
609 - dm      - The mesh DM
610 
611   Output Parameters:
612 + partSection     - The PetscSection giving the division of points by partition
613 - partition       - The list of points by partition
614 
615   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
616 
617   Level: developer
618 
619 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
620 @*/
621 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
622 {
623   PetscMPIInt    size;
624   PetscErrorCode ierr;
625 
626   PetscFunctionBegin;
627   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
628   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
629   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
630   PetscValidPointer(partition, 5);
631   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
632   if (size == 1) {
633     PetscInt *points;
634     PetscInt  cStart, cEnd, c;
635 
636     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
637     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
638     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
639     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
640     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
641     for (c = cStart; c < cEnd; ++c) points[c] = c;
642     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
643     PetscFunctionReturn(0);
644   }
645   if (part->height == 0) {
646     PetscInt  numVertices;
647     PetscInt *start     = NULL;
648     PetscInt *adjacency = NULL;
649 
650     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency);CHKERRQ(ierr);
651     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
652     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
653     ierr = PetscFree(start);CHKERRQ(ierr);
654     ierr = PetscFree(adjacency);CHKERRQ(ierr);
655   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
656   PetscFunctionReturn(0);
657 }
658 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerDestroy_Shell"
/* Destroy the shell partitioner's implementation data: the stored section,
   the stored partition IS, and the data structure itself. */
PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
672 
673 #undef __FUNCT__
674 #define __FUNCT__ "PetscPartitionerView_Shell_Ascii"
675 PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
676 {
677   PetscViewerFormat format;
678   PetscErrorCode    ierr;
679 
680   PetscFunctionBegin;
681   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
682   ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
683   PetscFunctionReturn(0);
684 }
685 
686 #undef __FUNCT__
687 #define __FUNCT__ "PetscPartitionerView_Shell"
688 PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
689 {
690   PetscBool      iascii;
691   PetscErrorCode ierr;
692 
693   PetscFunctionBegin;
694   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
695   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
696   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
697   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
698   PetscFunctionReturn(0);
699 }
700 
701 #undef __FUNCT__
702 #define __FUNCT__ "PetscPartitionerPartition_Shell"
703 PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
704 {
705   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
706   PetscInt                np;
707   PetscErrorCode          ierr;
708 
709   PetscFunctionBegin;
710   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
711   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
712   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
713   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
714   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
715   *partition = p->partition;
716   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
717   PetscFunctionReturn(0);
718 }
719 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerInitialize_Shell"
/* Install the shell implementation's function table on the partitioner object */
PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view      = PetscPartitionerView_Shell;
  part->ops->destroy   = PetscPartitionerDestroy_Shell;
  part->ops->partition = PetscPartitionerPartition_Shell;
  PetscFunctionReturn(0);
}
730 
/*MC
  PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner implementation that returns a partition supplied in advance by the user with PetscPartitionerShellSetPartition()

  Level: intermediate

.seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType(), PetscPartitionerShellSetPartition()
M*/
738 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerCreate_Shell"
/* Constructor for the shell partitioner: allocates the implementation data
   and installs the shell function table. */
PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
{
  PetscPartitioner_Shell *p;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
  part->data = p;

  ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
754 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerShellSetPartition"
/*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh

  Collective on PetscPartitioner

  Input Parameters:
+ part     - The PetscPartitioner
. numProcs - The number of partitions
. sizes    - array of size numProcs (or NULL) providing the number of points in each partition
- points   - array of size sum(sizes) (may be NULL iff sizes is NULL) providing the partition each point belongs to

  Level: developer

  Notes:

    It is safe to free the sizes and points arrays after use in this routine.

.seealso DMPlexDistribute(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt numProcs, const PetscInt sizes[], const PetscInt points[])
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscInt                proc, numPoints;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  if (sizes) {PetscValidPointer(sizes, 3);}
  /* points is required whenever sizes is given */
  if (sizes) {PetscValidPointer(points, 4);}
  /* Release any previously configured partition */
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(p->section, 0, numProcs);CHKERRQ(ierr);
  if (sizes) {
    for (proc = 0; proc < numProcs; ++proc) {
      ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
    }
  }
  ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
  /* The points array is copied (PETSC_COPY_VALUES), so the caller may free it afterwards */
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
800 
801 #undef __FUNCT__
802 #define __FUNCT__ "PetscPartitionerDestroy_Simple"
803 PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
804 {
805   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
806   PetscErrorCode          ierr;
807 
808   PetscFunctionBegin;
809   ierr = PetscFree(p);CHKERRQ(ierr);
810   PetscFunctionReturn(0);
811 }
812 
813 #undef __FUNCT__
814 #define __FUNCT__ "PetscPartitionerView_Simple_Ascii"
815 PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
816 {
817   PetscViewerFormat format;
818   PetscErrorCode    ierr;
819 
820   PetscFunctionBegin;
821   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
822   ierr = PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");CHKERRQ(ierr);
823   PetscFunctionReturn(0);
824 }
825 
826 #undef __FUNCT__
827 #define __FUNCT__ "PetscPartitionerView_Simple"
828 PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
829 {
830   PetscBool      iascii;
831   PetscErrorCode ierr;
832 
833   PetscFunctionBegin;
834   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
835   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
836   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
837   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
838   PetscFunctionReturn(0);
839 }
840 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerPartition_Simple"
/*
  PetscPartitionerPartition_Simple - Assign contiguous runs of vertices to partitions.

  The local partition IS is always the identity [0, numVertices); the useful output
  is partSection, which records how many of this process's vertices fall into each
  of the nparts target partitions when the global vertex range is split into nparts
  nearly-equal contiguous chunks (the first nvGlobal % nparts chunks hold one extra
  vertex).  The CSR graph (start/adjacency) is ignored: this partitioner balances
  counts only and makes no attempt to minimize edge cut.
*/
PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)dm);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  /* Local points keep their original order within the partition listing */
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial: partition np gets numVertices/nparts vertices, plus one extra when np < numVertices % nparts */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  }
  else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Build the prefix sum of per-process vertex counts to find this process's
       global index range [myFirst, myLast] and the global total nvGlobal */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      /* Partitions are contiguous chunks of [0, nvGlobal): the first rem
         partitions hold pBig = floor(nvGlobal/nparts)+1 vertices, the remaining
         nparts-rem hold pSmall.  Locate the partitions overlapping
         [myFirst, myLast]. */
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      PetscInt rem = nvGlobal % nparts;
      PetscInt pSmall = nvGlobal/nparts;
      PetscInt pBig = nvGlobal/nparts + 1;


      if (rem) {
        /* First guess assuming every partition were big */
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        }
        else {
          /* myFirst lies past the big partitions; index into the small ones */
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        }
        else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      }
      else {
        /* nparts divides nvGlobal: all partitions have size nvGlobal/nparts */
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      /* Record how many of my vertices land in each overlapping partition:
         the overlap of [PartStart, PartEnd) with [myFirst, myLast+1) */
      for (np = firstPart; np <= lastPart; np++) {
        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
916 
917 #undef __FUNCT__
918 #define __FUNCT__ "PetscPartitionerInitialize_Simple"
919 PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
920 {
921   PetscFunctionBegin;
922   part->ops->view      = PetscPartitionerView_Simple;
923   part->ops->destroy   = PetscPartitionerDestroy_Simple;
924   part->ops->partition = PetscPartitionerPartition_Simple;
925   PetscFunctionReturn(0);
926 }
927 
928 /*MC
929   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
930 
931   Level: intermediate
932 
933 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
934 M*/
935 
936 #undef __FUNCT__
937 #define __FUNCT__ "PetscPartitionerCreate_Simple"
938 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
939 {
940   PetscPartitioner_Simple *p;
941   PetscErrorCode           ierr;
942 
943   PetscFunctionBegin;
944   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
945   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
946   part->data = p;
947 
948   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
949   PetscFunctionReturn(0);
950 }
951 
952 #undef __FUNCT__
953 #define __FUNCT__ "PetscPartitionerDestroy_Gather"
954 PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
955 {
956   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
957   PetscErrorCode          ierr;
958 
959   PetscFunctionBegin;
960   ierr = PetscFree(p);CHKERRQ(ierr);
961   PetscFunctionReturn(0);
962 }
963 
964 #undef __FUNCT__
965 #define __FUNCT__ "PetscPartitionerView_Gather_Ascii"
966 PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
967 {
968   PetscViewerFormat format;
969   PetscErrorCode    ierr;
970 
971   PetscFunctionBegin;
972   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
973   ierr = PetscViewerASCIIPrintf(viewer, "Gather Graph Partitioner:\n");CHKERRQ(ierr);
974   PetscFunctionReturn(0);
975 }
976 
977 #undef __FUNCT__
978 #define __FUNCT__ "PetscPartitionerView_Gather"
979 PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
980 {
981   PetscBool      iascii;
982   PetscErrorCode ierr;
983 
984   PetscFunctionBegin;
985   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
986   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
987   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
988   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
989   PetscFunctionReturn(0);
990 }
991 
992 #undef __FUNCT__
993 #define __FUNCT__ "PetscPartitionerPartition_Gather"
994 PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
995 {
996   PetscInt       np;
997   PetscErrorCode ierr;
998 
999   PetscFunctionBegin;
1000   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1001   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1002   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1003   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1004   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1005   PetscFunctionReturn(0);
1006 }
1007 
1008 #undef __FUNCT__
1009 #define __FUNCT__ "PetscPartitionerInitialize_Gather"
1010 PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1011 {
1012   PetscFunctionBegin;
1013   part->ops->view      = PetscPartitionerView_Gather;
1014   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1015   part->ops->partition = PetscPartitionerPartition_Gather;
1016   PetscFunctionReturn(0);
1017 }
1018 
1019 /*MC
1020   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1021 
1022   Level: intermediate
1023 
1024 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1025 M*/
1026 
1027 #undef __FUNCT__
1028 #define __FUNCT__ "PetscPartitionerCreate_Gather"
1029 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1030 {
1031   PetscPartitioner_Gather *p;
1032   PetscErrorCode           ierr;
1033 
1034   PetscFunctionBegin;
1035   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1036   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1037   part->data = p;
1038 
1039   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1040   PetscFunctionReturn(0);
1041 }
1042 
1043 
1044 #undef __FUNCT__
1045 #define __FUNCT__ "PetscPartitionerDestroy_Chaco"
1046 PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1047 {
1048   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1049   PetscErrorCode          ierr;
1050 
1051   PetscFunctionBegin;
1052   ierr = PetscFree(p);CHKERRQ(ierr);
1053   PetscFunctionReturn(0);
1054 }
1055 
1056 #undef __FUNCT__
1057 #define __FUNCT__ "PetscPartitionerView_Chaco_Ascii"
1058 PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1059 {
1060   PetscViewerFormat format;
1061   PetscErrorCode    ierr;
1062 
1063   PetscFunctionBegin;
1064   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1065   ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
1066   PetscFunctionReturn(0);
1067 }
1068 
1069 #undef __FUNCT__
1070 #define __FUNCT__ "PetscPartitionerView_Chaco"
1071 PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1072 {
1073   PetscBool      iascii;
1074   PetscErrorCode ierr;
1075 
1076   PetscFunctionBegin;
1077   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1078   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1079   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1080   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1081   PetscFunctionReturn(0);
1082 }
1083 
1084 #if defined(PETSC_HAVE_CHACO)
1085 #if defined(PETSC_HAVE_UNISTD_H)
1086 #include <unistd.h>
1087 #endif
1088 /* Chaco does not have an include file */
1089 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1090                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1091                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1092                        int mesh_dims[3], double *goal, int global_method, int local_method,
1093                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1094 
1095 extern int FREE_GRAPH;
1096 #endif
1097 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerPartition_Chaco"
/*
  PetscPartitionerPartition_Chaco - Partition the local CSR graph with the Chaco library.

  The adjacency list is shifted to Chaco's 1-based numbering in place (and shifted
  back before returning), the graph is partitioned by Chaco's interface() entry
  point, and the result is converted to a PetscSection (points per partition) plus
  an IS listing the vertices grouped by partition.  Chaco prints diagnostics to
  stdout; that output is captured through a pipe and surfaced only when Chaco
  reports an error.
*/
PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
  short int     *assignment;              /* Output partition */
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  if (!numVertices) {
    /* Nothing local to partition: produce an empty section and IS */
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco uses 1-based vertex numbers; shift the adjacency in place (undone below) */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  /* NOTE(review): when PETSC_HAVE_UNISTD_H is not defined, the return code of
     interface() is never checked — verify whether that path needs its own
     error handling */
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    /* Restore stdout and harvest whatever Chaco printed.  NOTE(review): a single
       read() is assumed to be sufficient (and non-blocking because the pipe
       buffer holds data) — confirm for large Chaco output */
    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* List the vertices grouped by assigned partition */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift so the caller's graph is unchanged */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1203 
1204 #undef __FUNCT__
1205 #define __FUNCT__ "PetscPartitionerInitialize_Chaco"
1206 PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1207 {
1208   PetscFunctionBegin;
1209   part->ops->view      = PetscPartitionerView_Chaco;
1210   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1211   part->ops->partition = PetscPartitionerPartition_Chaco;
1212   PetscFunctionReturn(0);
1213 }
1214 
1215 /*MC
1216   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1217 
1218   Level: intermediate
1219 
1220 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1221 M*/
1222 
1223 #undef __FUNCT__
1224 #define __FUNCT__ "PetscPartitionerCreate_Chaco"
1225 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1226 {
1227   PetscPartitioner_Chaco *p;
1228   PetscErrorCode          ierr;
1229 
1230   PetscFunctionBegin;
1231   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1232   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1233   part->data = p;
1234 
1235   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1236   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1237   PetscFunctionReturn(0);
1238 }
1239 
1240 #undef __FUNCT__
1241 #define __FUNCT__ "PetscPartitionerDestroy_ParMetis"
1242 PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1243 {
1244   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1245   PetscErrorCode             ierr;
1246 
1247   PetscFunctionBegin;
1248   ierr = PetscFree(p);CHKERRQ(ierr);
1249   PetscFunctionReturn(0);
1250 }
1251 
1252 #undef __FUNCT__
1253 #define __FUNCT__ "PetscPartitionerView_ParMetis_Ascii"
1254 PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1255 {
1256   PetscViewerFormat format;
1257   PetscErrorCode    ierr;
1258 
1259   PetscFunctionBegin;
1260   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1261   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
1262   PetscFunctionReturn(0);
1263 }
1264 
1265 #undef __FUNCT__
1266 #define __FUNCT__ "PetscPartitionerView_ParMetis"
1267 PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1268 {
1269   PetscBool      iascii;
1270   PetscErrorCode ierr;
1271 
1272   PetscFunctionBegin;
1273   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1274   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1275   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1276   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1277   PetscFunctionReturn(0);
1278 }
1279 
1280 #if defined(PETSC_HAVE_PARMETIS)
1281 #include <parmetis.h>
1282 #endif
1283 
1284 #undef __FUNCT__
1285 #define __FUNCT__ "PetscPartitionerPartition_ParMetis"
1286 PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1287 {
1288 #if defined(PETSC_HAVE_PARMETIS)
1289   MPI_Comm       comm;
1290   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1291   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1292   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1293   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1294   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1295   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1296   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1297   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1298   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1299   PetscReal     *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1300   PetscReal     *ubvec;                    /* The balance intolerance for vertex weights */
1301   PetscInt       options[5];               /* Options */
1302   /* Outputs */
1303   PetscInt       edgeCut;                  /* The number of edges cut by the partition */
1304   PetscInt      *assignment, *points;
1305   PetscMPIInt    rank, p, v, i;
1306   PetscErrorCode ierr;
1307 
1308   PetscFunctionBegin;
1309   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1310   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1311   options[0] = 0; /* Use all defaults */
1312   /* Calculate vertex distribution */
1313   ierr = PetscMalloc4(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment);CHKERRQ(ierr);
1314   vtxdist[0] = 0;
1315   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1316   for (p = 2; p <= nparts; ++p) {
1317     vtxdist[p] += vtxdist[p-1];
1318   }
1319   /* Calculate weights */
1320   for (p = 0; p < nparts; ++p) {
1321     tpwgts[p] = 1.0/nparts;
1322   }
1323   ubvec[0] = 1.05;
1324 
1325   if (nparts == 1) {
1326     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1327   } else {
1328     if (vtxdist[1] == vtxdist[nparts]) {
1329       if (!rank) {
1330         PetscStackPush("METIS_PartGraphKway");
1331         ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
1332         PetscStackPop;
1333         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1334       }
1335     } else {
1336       PetscStackPush("ParMETIS_V3_PartKway");
1337       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1338       PetscStackPop;
1339       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
1340     }
1341   }
1342   /* Convert to PetscSection+IS */
1343   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1344   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1345   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1346   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1347   for (p = 0, i = 0; p < nparts; ++p) {
1348     for (v = 0; v < nvtxs; ++v) {
1349       if (assignment[v] == p) points[i++] = v;
1350     }
1351   }
1352   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1353   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1354   ierr = PetscFree4(vtxdist,tpwgts,ubvec,assignment);CHKERRQ(ierr);
1355   PetscFunctionReturn(0);
1356 #else
1357   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1358 #endif
1359 }
1360 
1361 #undef __FUNCT__
1362 #define __FUNCT__ "PetscPartitionerInitialize_ParMetis"
1363 PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1364 {
1365   PetscFunctionBegin;
1366   part->ops->view      = PetscPartitionerView_ParMetis;
1367   part->ops->destroy   = PetscPartitionerDestroy_ParMetis;
1368   part->ops->partition = PetscPartitionerPartition_ParMetis;
1369   PetscFunctionReturn(0);
1370 }
1371 
1372 /*MC
1373   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1374 
1375   Level: intermediate
1376 
1377 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1378 M*/
1379 
1380 #undef __FUNCT__
1381 #define __FUNCT__ "PetscPartitionerCreate_ParMetis"
1382 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1383 {
1384   PetscPartitioner_ParMetis *p;
1385   PetscErrorCode          ierr;
1386 
1387   PetscFunctionBegin;
1388   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1389   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1390   part->data = p;
1391 
1392   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1393   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1394   PetscFunctionReturn(0);
1395 }
1396 
1397 #undef __FUNCT__
1398 #define __FUNCT__ "DMPlexGetPartitioner"
1399 /*@
1400   DMPlexGetPartitioner - Get the mesh partitioner
1401 
1402   Not collective
1403 
1404   Input Parameter:
1405 . dm - The DM
1406 
1407   Output Parameter:
1408 . part - The PetscPartitioner
1409 
1410   Level: developer
1411 
1412   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1413 
1414 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1415 @*/
1416 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1417 {
1418   DM_Plex *mesh = (DM_Plex *) dm->data;
1419 
1420   PetscFunctionBegin;
1421   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1422   PetscValidPointer(part, 2);
1423   *part = mesh->partitioner;
1424   PetscFunctionReturn(0);
1425 }
1426 
1427 #undef __FUNCT__
1428 #define __FUNCT__ "DMPlexSetPartitioner"
1429 /*@
1430   DMPlexSetPartitioner - Set the mesh partitioner
1431 
1432   logically collective on dm and part
1433 
1434   Input Parameters:
1435 + dm - The DM
1436 - part - The partitioner
1437 
1438   Level: developer
1439 
1440   Note: Any existing PetscPartitioner will be destroyed.
1441 
1442 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1443 @*/
1444 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1445 {
1446   DM_Plex       *mesh = (DM_Plex *) dm->data;
1447   PetscErrorCode ierr;
1448 
1449   PetscFunctionBegin;
1450   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1451   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1452   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1453   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1454   mesh->partitioner = part;
1455   PetscFunctionReturn(0);
1456 }
1457 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelClosure_Tree"
/*
  DMPlexPartitionLabelClosure_Tree - Label tree-related points of 'point' with 'rank'.

  When 'up' is true, labels the transitive closure of the point's tree parent and
  recurses upward through further ancestors; when 'down' is true, labels the
  point's tree children and recurses downward through further descendants.
  A point that is its own parent (no parent) or has no children terminates the
  respective recursion.
*/
static PetscErrorCode DMPlexPartitionLabelClosure_Tree(DM dm, DMLabel label, PetscInt rank, PetscInt point, PetscBool up, PetscBool down)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (up) {
    PetscInt parent;

    ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
    if (parent != point) {
      PetscInt closureSize, *closure = NULL, i;

      /* Label every point in the parent's closure, then walk its ancestors.
         closure[] alternates (point, orientation), hence the 2*i stride */
      ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
      for (i = 0; i < closureSize; i++) {
        PetscInt cpoint = closure[2*i];

        ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
        /* Recurse upward only: the downward direction is handled separately */
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
      }
      ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
    }
  }
  if (down) {
    PetscInt numChildren;
    const PetscInt *children;

    ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
    if (numChildren) {
      PetscInt i;

      /* Label each child and recurse downward only */
      for (i = 0; i < numChildren; i++) {
        PetscInt cpoint = children[i];

        ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}
1500 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelClosure"
/*@
  DMPlexPartitionLabelClosure - Add the closure of all points to the partition label

  Input Parameters:
+ dm     - The DM
- label  - DMLabel assigning ranks to remote roots

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, c, closureSize;
  PetscInt       *closure = NULL;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* For each rank present in the label, label the transitive closure (and
     tree ancestors/descendants) of every point currently assigned to it */
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];

    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* closure[] alternates (point, orientation), hence the stride of 2 */
      ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      for (c = 0; c < closureSize*2; c += 2) {
        ierr = DMLabelSetValue(label, closure[c], rank);CHKERRQ(ierr);
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,closure[c],PETSC_TRUE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  /* Return the reused closure work array; the point argument appears to be
     ignored here — NOTE(review): confirm RestoreTransitiveClosure semantics */
  if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);}
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1547 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelAdjacency"
/*@
  DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label

  Input Parameters:
+ dm     - The DM
- label  - DMLabel assigning ranks to remote roots

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, a, adjSize;
  PetscInt       *adj = NULL;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* For each rank present in the label, label every point adjacent to a point
     currently assigned to that rank */
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];

    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* PETSC_DETERMINE asks DMPlexGetAdjacency() to (re)size the adj array;
         the array is reused across iterations and freed once at the end */
      adjSize = PETSC_DETERMINE;
      ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
      for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  ierr = PetscFree(adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1592 
1593 #undef __FUNCT__
1594 #define __FUNCT__ "DMPlexPartitionLabelPropagate"
1595 /*@
1596   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
1597 
1598   Input Parameters:
1599 + dm     - The DM
1600 - label  - DMLabel assinging ranks to remote roots
1601 
1602   Level: developer
1603 
1604   Note: This is required when generating multi-level overlaps to capture
1605   overlap points from non-neighbouring partitions.
1606 
1607 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1608 @*/
1609 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
1610 {
1611   MPI_Comm        comm;
1612   PetscMPIInt     rank;
1613   PetscSF         sfPoint;
1614   DMLabel         lblRoots, lblLeaves;
1615   IS              rankIS, pointIS;
1616   const PetscInt *ranks;
1617   PetscInt        numRanks, r;
1618   PetscErrorCode  ierr;
1619 
1620   PetscFunctionBegin;
1621   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1622   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1623   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1624   /* Pull point contributions from remote leaves into local roots */
1625   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
1626   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
1627   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1628   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1629   for (r = 0; r < numRanks; ++r) {
1630     const PetscInt remoteRank = ranks[r];
1631     if (remoteRank == rank) continue;
1632     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
1633     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1634     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1635   }
1636   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1637   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1638   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
1639   /* Push point contributions from roots into remote leaves */
1640   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
1641   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
1642   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1643   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1644   for (r = 0; r < numRanks; ++r) {
1645     const PetscInt remoteRank = ranks[r];
1646     if (remoteRank == rank) continue;
1647     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
1648     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1649     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1650   }
1651   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1652   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1653   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
1654   PetscFunctionReturn(0);
1655 }
1656 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelInvert"
/*@
  DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label

  Input Parameters:
+ dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots

  Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
  resulting leafLabel is a receiver mapping of remote roots to their parent rank.

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, numProcs;
  PetscInt           p, n, numNeighbors, size, l, nleaves;
  PetscSF            sfPoint;
  PetscSFNode       *rootPoints, *leafPoints;
  PetscSection       rootSection, leafSection;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners */
  /* rootSection has one chart entry per target rank; the dof count for a rank
     is the number of local points the label sends to it */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, numProcs);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(size, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* If the point is a leaf in the point SF, record its true (remote) owner;
         otherwise the point is a local root and we record ourselves */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Filter remote contributions (ovLeafPoints) into the overlapSF */
  /* Each received (point, rank) pair becomes an entry of the inverted label */
  ierr = PetscSectionGetStorageSize(leafSection, &size);CHKERRQ(ierr);
  for (p = 0; p < size; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  /* leafPoints was allocated by DMPlexDistributeData and is owned here */
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1746 
1747 #undef __FUNCT__
1748 #define __FUNCT__ "DMPlexPartitionLabelCreateSF"
1749 /*@
1750   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
1751 
1752   Input Parameters:
1753 + dm    - The DM
1754 . label - DMLabel assinging ranks to remote roots
1755 
1756   Output Parameter:
1757 - sf    - The star forest communication context encapsulating the defined mapping
1758 
1759   Note: The incoming label is a receiver mapping of remote points to their parent rank.
1760 
1761   Level: developer
1762 
1763 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
1764 @*/
1765 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
1766 {
1767   PetscMPIInt     rank, numProcs;
1768   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
1769   PetscSFNode    *remotePoints;
1770   IS              remoteRootIS;
1771   const PetscInt *remoteRoots;
1772   PetscErrorCode ierr;
1773 
1774   PetscFunctionBegin;
1775   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1776   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
1777 
1778   for (numRemote = 0, n = 0; n < numProcs; ++n) {
1779     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1780     numRemote += numPoints;
1781   }
1782   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
1783   /* Put owned points first */
1784   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
1785   if (numPoints > 0) {
1786     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
1787     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1788     for (p = 0; p < numPoints; p++) {
1789       remotePoints[idx].index = remoteRoots[p];
1790       remotePoints[idx].rank = rank;
1791       idx++;
1792     }
1793     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1794     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1795   }
1796   /* Now add remote points */
1797   for (n = 0; n < numProcs; ++n) {
1798     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1799     if (numPoints <= 0 || n == rank) continue;
1800     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
1801     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1802     for (p = 0; p < numPoints; p++) {
1803       remotePoints[idx].index = remoteRoots[p];
1804       remotePoints[idx].rank = n;
1805       idx++;
1806     }
1807     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1808     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1809   }
1810   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
1811   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1812   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1813   PetscFunctionReturn(0);
1814 }
1815