xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 6a5a2ffd5374c2c3a8f5c9693674b3ef62a9c4c0)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
3 PetscClassId PETSCPARTITIONER_CLASSID = 0;
4 
5 PetscFunctionList PetscPartitionerList              = NULL;
6 PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
7 
8 PetscBool ChacoPartitionercite = PETSC_FALSE;
/* BibTeX citation text for the Chaco partitioner (registration tracked by ChacoPartitionercite above).
   Every physical line of the entry must end in "\n" so the emitted BibTeX is line-broken correctly. */
const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
                               "  author    = {Bruce Hendrickson and Robert Leland},\n"
                               "  title     = {A multilevel algorithm for partitioning graphs},\n"
                               "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},\n"
                               "  isbn      = {0-89791-816-9},\n"
                               "  pages     = {28},\n"
                               "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
                               "  publisher = {ACM Press},\n"
                               "  address   = {New York},\n"
                               "  year      = {1995}\n}\n";
19 
20 PetscBool ParMetisPartitionercite = PETSC_FALSE;
/* BibTeX citation text for the ParMetis partitioner (registration tracked by ParMetisPartitionercite above) */
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";
28 
#undef __FUNCT__
#define __FUNCT__ "DMPlexCreatePartitionerGraph"
/*@C
  DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner

  Input Parameters:
+ dm      - The mesh DM
- height  - Height of the strata from which to construct the graph

  Output Parameters:
+ numVertices - Number of vertices in the graph
. offsets     - Point offsets in the graph
- adjacency   - Point connectivity in the graph

  Notes:
  The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
  DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: developer

.seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
@*/
PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
{
  PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
  PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
  IS             cellNumbering;
  const PetscInt *cellNum;
  PetscBool      useCone, useClosure;
  PetscSection   section;
  PetscSegBuffer adjBuffer;
  PetscSF        sfPoint;
  PetscMPIInt    rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* nroots > 0 signals shared points (parallel run); used below to detect and skip ghost cells */
  ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
  /* Build adjacency graph via a section/segbuffer */
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
  /* Always use FVM adjacency to create partitioner graph; the user's settings are saved here and restored below */
  ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
  if (nroots > 0) {
    ierr = DMPlexGetCellNumbering(dm, &cellNumbering);CHKERRQ(ierr);
    ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  }
  for (*numVertices = 0, p = pStart; p < pEnd; p++) {
    /* Skip non-owned cells in parallel (ParMetis expects no overlap); non-owned cells carry negative numbers */
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    adjSize = PETSC_DETERMINE;
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      const PetscInt point = adj[a];
      /* Record only neighbors within the same stratum, excluding the point itself */
      if (point != p && pStart <= point && point < pEnd) {
        PetscInt *PETSC_RESTRICT pBuf;
        ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
        ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
        *pBuf = point;
      }
    }
    (*numVertices)++;
  }
  /* Restore the caller's adjacency settings */
  ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
  /* Derive CSR graph from section/segbuffer */
  ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
  for (idx = 0, p = pStart; p < pEnd; p++) {
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
  }
  vOffsets[*numVertices] = size;
  if (offsets) *offsets = vOffsets;
  ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
  if (nroots > 0) {
    ISLocalToGlobalMapping ltogCells;
    PetscInt n, size, *cells_arr;
    /* In parallel, apply a global cell numbering to the graph */
    ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
    /* Convert to positive global cell numbers (negative entries encode -(global+1), as skipped above) */
    for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
    ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
  }
  if (adjacency) *adjacency = graph;
  /* Clean up */
  ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
  ierr = PetscFree(adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
132 
133 #undef __FUNCT__
134 #define __FUNCT__ "DMPlexCreateNeighborCSR"
135 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
136 {
137   const PetscInt maxFaceCases = 30;
138   PetscInt       numFaceCases = 0;
139   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
140   PetscInt      *off, *adj;
141   PetscInt      *neighborCells = NULL;
142   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
143   PetscErrorCode ierr;
144 
145   PetscFunctionBegin;
146   /* For parallel partitioning, I think you have to communicate supports */
147   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
148   cellDim = dim - cellHeight;
149   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
150   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
151   if (cEnd - cStart == 0) {
152     if (numVertices) *numVertices = 0;
153     if (offsets)   *offsets   = NULL;
154     if (adjacency) *adjacency = NULL;
155     PetscFunctionReturn(0);
156   }
157   numCells  = cEnd - cStart;
158   faceDepth = depth - cellHeight;
159   if (dim == depth) {
160     PetscInt f, fStart, fEnd;
161 
162     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
163     /* Count neighboring cells */
164     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
165     for (f = fStart; f < fEnd; ++f) {
166       const PetscInt *support;
167       PetscInt        supportSize;
168       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
169       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
170       if (supportSize == 2) {
171         PetscInt numChildren;
172 
173         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
174         if (!numChildren) {
175           ++off[support[0]-cStart+1];
176           ++off[support[1]-cStart+1];
177         }
178       }
179     }
180     /* Prefix sum */
181     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
182     if (adjacency) {
183       PetscInt *tmp;
184 
185       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
186       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
187       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
188       /* Get neighboring cells */
189       for (f = fStart; f < fEnd; ++f) {
190         const PetscInt *support;
191         PetscInt        supportSize;
192         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
193         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
194         if (supportSize == 2) {
195           PetscInt numChildren;
196 
197           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
198           if (!numChildren) {
199             adj[tmp[support[0]-cStart]++] = support[1];
200             adj[tmp[support[1]-cStart]++] = support[0];
201           }
202         }
203       }
204 #if defined(PETSC_USE_DEBUG)
205       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
206 #endif
207       ierr = PetscFree(tmp);CHKERRQ(ierr);
208     }
209     if (numVertices) *numVertices = numCells;
210     if (offsets)   *offsets   = off;
211     if (adjacency) *adjacency = adj;
212     PetscFunctionReturn(0);
213   }
214   /* Setup face recognition */
215   if (faceDepth == 1) {
216     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
217 
218     for (c = cStart; c < cEnd; ++c) {
219       PetscInt corners;
220 
221       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
222       if (!cornersSeen[corners]) {
223         PetscInt nFV;
224 
225         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
226         cornersSeen[corners] = 1;
227 
228         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
229 
230         numFaceVertices[numFaceCases++] = nFV;
231       }
232     }
233   }
234   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
235   /* Count neighboring cells */
236   for (cell = cStart; cell < cEnd; ++cell) {
237     PetscInt numNeighbors = PETSC_DETERMINE, n;
238 
239     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
240     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
241     for (n = 0; n < numNeighbors; ++n) {
242       PetscInt        cellPair[2];
243       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
244       PetscInt        meetSize = 0;
245       const PetscInt *meet    = NULL;
246 
247       cellPair[0] = cell; cellPair[1] = neighborCells[n];
248       if (cellPair[0] == cellPair[1]) continue;
249       if (!found) {
250         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
251         if (meetSize) {
252           PetscInt f;
253 
254           for (f = 0; f < numFaceCases; ++f) {
255             if (numFaceVertices[f] == meetSize) {
256               found = PETSC_TRUE;
257               break;
258             }
259           }
260         }
261         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
262       }
263       if (found) ++off[cell-cStart+1];
264     }
265   }
266   /* Prefix sum */
267   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
268 
269   if (adjacency) {
270     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
271     /* Get neighboring cells */
272     for (cell = cStart; cell < cEnd; ++cell) {
273       PetscInt numNeighbors = PETSC_DETERMINE, n;
274       PetscInt cellOffset   = 0;
275 
276       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
277       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
278       for (n = 0; n < numNeighbors; ++n) {
279         PetscInt        cellPair[2];
280         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
281         PetscInt        meetSize = 0;
282         const PetscInt *meet    = NULL;
283 
284         cellPair[0] = cell; cellPair[1] = neighborCells[n];
285         if (cellPair[0] == cellPair[1]) continue;
286         if (!found) {
287           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
288           if (meetSize) {
289             PetscInt f;
290 
291             for (f = 0; f < numFaceCases; ++f) {
292               if (numFaceVertices[f] == meetSize) {
293                 found = PETSC_TRUE;
294                 break;
295               }
296             }
297           }
298           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
299         }
300         if (found) {
301           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
302           ++cellOffset;
303         }
304       }
305     }
306   }
307   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
308   if (numVertices) *numVertices = numCells;
309   if (offsets)   *offsets   = off;
310   if (adjacency) *adjacency = adj;
311   PetscFunctionReturn(0);
312 }
313 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerRegister"
/*@C
  PetscPartitionerRegister - Adds a new PetscPartitioner implementation

  Not Collective

  Input Parameters:
+ name        - The name of a new user-defined creation routine
- create_func - The creation routine itself

  Notes:
  PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners

  Sample usage:
.vb
    PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
.ve

  Then, your PetscPartitioner type can be chosen with the procedural interface via
.vb
    PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
    PetscPartitionerSetType(PetscPartitioner, "my_part");
.ve
   or at runtime via the option
.vb
    -petscpartitioner_type my_part
.ve

  Level: advanced

.keywords: PetscPartitioner, register
.seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()

@*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Store the constructor in the global list searched by PetscPartitionerSetType() */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
357 
358 #undef __FUNCT__
359 #define __FUNCT__ "PetscPartitionerSetType"
360 /*@C
361   PetscPartitionerSetType - Builds a particular PetscPartitioner
362 
363   Collective on PetscPartitioner
364 
365   Input Parameters:
366 + part - The PetscPartitioner object
367 - name - The kind of partitioner
368 
369   Options Database Key:
370 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
371 
372   Level: intermediate
373 
374 .keywords: PetscPartitioner, set, type
375 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
376 @*/
377 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
378 {
379   PetscErrorCode (*r)(PetscPartitioner);
380   PetscBool      match;
381   PetscErrorCode ierr;
382 
383   PetscFunctionBegin;
384   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
385   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
386   if (match) PetscFunctionReturn(0);
387 
388   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
389   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
390   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
391 
392   if (part->ops->destroy) {
393     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
394     part->ops->destroy = NULL;
395   }
396   ierr = (*r)(part);CHKERRQ(ierr);
397   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
398   PetscFunctionReturn(0);
399 }
400 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerGetType"
/*@C
  PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.

  Not Collective

  Input Parameter:
. part - The PetscPartitioner

  Output Parameter:
. name - The PetscPartitioner type name

  Level: intermediate

.keywords: PetscPartitioner, get, type, name
.seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  /* Hand back a pointer to the object's internal type-name string */
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
430 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerView"
/*@C
  PetscPartitionerView - Views a PetscPartitioner

  Collective on PetscPartitioner

  Input Parameters:
+ part - the PetscPartitioner object to view
- v    - the viewer (if NULL, the standard ASCII viewer for the object's communicator is used)

  Level: developer

.seealso: PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Default to stdout on the object's communicator when no viewer is supplied */
  if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
  /* Delegate to the type-specific viewer, if one is installed */
  if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
456 
457 #undef __FUNCT__
458 #define __FUNCT__ "PetscPartitionerViewFromOptions"
459 /*
460   PetscPartitionerViewFromOptions - Processes command line options to determine if/how a PetscPartitioner is to be viewed.
461 
462   Collective on PetscPartitioner
463 
464   Input Parameters:
465 + part   - the PetscPartitioner
466 . prefix - prefix to use for viewing, or NULL
467 - optionname - option to activate viewing
468 
469   Level: intermediate
470 
471 .keywords: PetscPartitioner, view, options, database
472 .seealso: VecViewFromOptions(), MatViewFromOptions()
473 */
474 PetscErrorCode PetscPartitionerViewFromOptions(PetscPartitioner part, const char prefix[], const char optionname[])
475 {
476   PetscViewer       viewer;
477   PetscViewerFormat format;
478   PetscBool         flg;
479   PetscErrorCode    ierr;
480 
481   PetscFunctionBegin;
482   if (prefix) {ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject) part), prefix,                       optionname, &viewer, &format, &flg);CHKERRQ(ierr);}
483   else        {ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject) part), ((PetscObject) part)->prefix, optionname, &viewer, &format, &flg);CHKERRQ(ierr);}
484   if (flg) {
485     ierr = PetscViewerPushFormat(viewer, format);CHKERRQ(ierr);
486     ierr = PetscPartitionerView(part, viewer);CHKERRQ(ierr);
487     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
488     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
489   }
490   PetscFunctionReturn(0);
491 }
492 #undef __FUNCT__
493 #define __FUNCT__ "PetscPartitionerSetTypeFromOptions_Internal"
494 PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
495 {
496   const char    *defaultType;
497   char           name[256];
498   PetscBool      flg;
499   PetscErrorCode ierr;
500 
501   PetscFunctionBegin;
502   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
503   if (!((PetscObject) part)->type_name) defaultType = PETSCPARTITIONERCHACO;
504   else                                  defaultType = ((PetscObject) part)->type_name;
505   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
506 
507   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
508   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);CHKERRQ(ierr);
509   if (flg) {
510     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
511   } else if (!((PetscObject) part)->type_name) {
512     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
513   }
514   ierr = PetscOptionsEnd();CHKERRQ(ierr);
515   PetscFunctionReturn(0);
516 }
517 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerSetFromOptions"
/*@
  PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to set options for

  Options Database Key:
. -petscpartitioner_view - view the partitioner after it has been configured

  Level: developer

.seealso: PetscPartitionerView()
@*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Settle the type first so that the type-specific handler below exists */
  ierr = PetscPartitionerSetTypeFromOptions_Internal(part);CHKERRQ(ierr);

  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  if (part->ops->setfromoptions) {ierr = (*part->ops->setfromoptions)(part);CHKERRQ(ierr);}
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers((PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
548 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerSetUp"
/*@C
  PetscPartitionerSetUp - Construct data structures for the PetscPartitioner

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to setup

  Level: developer

.seealso: PetscPartitionerView(), PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Delegate to the type-specific setup, if one is installed; otherwise a no-op */
  if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
572 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerDestroy"
/*@
  PetscPartitionerDestroy - Destroys a PetscPartitioner object

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to destroy (set to NULL on return)

  Level: developer

.seealso: PetscPartitionerView()
@*/
PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*part) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);

  /* If other references remain, just drop ours and clear the caller's pointer */
  if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
  ((PetscObject) (*part))->refct = 0;

  /* Run the type-specific destructor before freeing the header */
  if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
  ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
602 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerCreate"
/*@
  PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the PetscPartitioner object

  Output Parameter:
. part - The PetscPartitioner object

  Level: beginner

.seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE
@*/
PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
{
  PetscPartitioner p;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidPointer(part, 2);
  *part = NULL;
  /* NOTE(review): relies on PetscFVInitializePackage() to register the
     PETSCPARTITIONER class id -- confirm against the package initializer */
  ierr = PetscFVInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);

  *part = p;
  PetscFunctionReturn(0);
}
635 
636 #undef __FUNCT__
637 #define __FUNCT__ "PetscPartitionerPartition"
638 /*@
639   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
640 
641   Collective on DM
642 
643   Input Parameters:
644 + part    - The PetscPartitioner
645 - dm      - The mesh DM
646 
647   Output Parameters:
648 + partSection     - The PetscSection giving the division of points by partition
649 - partition       - The list of points by partition
650 
651   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
652 
653   Level: developer
654 
655 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
656 @*/
657 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
658 {
659   PetscMPIInt    size;
660   PetscErrorCode ierr;
661 
662   PetscFunctionBegin;
663   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
664   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
665   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
666   PetscValidPointer(partition, 5);
667   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
668   if (size == 1) {
669     PetscInt *points;
670     PetscInt  cStart, cEnd, c;
671 
672     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
673     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
674     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
675     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
676     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
677     for (c = cStart; c < cEnd; ++c) points[c] = c;
678     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
679     PetscFunctionReturn(0);
680   }
681   if (part->height == 0) {
682     PetscInt  numVertices;
683     PetscInt *start     = NULL;
684     PetscInt *adjacency = NULL;
685 
686     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency);CHKERRQ(ierr);
687     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
688     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
689     ierr = PetscFree(start);CHKERRQ(ierr);
690     ierr = PetscFree(adjacency);CHKERRQ(ierr);
691   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
692   PetscFunctionReturn(0);
693 }
694 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerDestroy_Shell"
/* Type-specific destructor: release the stored partition description, then the data struct */
PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
708 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerView_Shell_Ascii"
/* ASCII viewer for the shell partitioner: prints a one-line banner */
PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscViewerFormat format;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  /* NOTE(review): format is retrieved but not currently used by this viewer */
  ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
721 
722 #undef __FUNCT__
723 #define __FUNCT__ "PetscPartitionerView_Shell"
724 PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
725 {
726   PetscBool      iascii;
727   PetscErrorCode ierr;
728 
729   PetscFunctionBegin;
730   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
731   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
732   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
733   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
734   PetscFunctionReturn(0);
735 }
736 
737 #undef __FUNCT__
738 #define __FUNCT__ "PetscPartitionerPartition_Shell"
739 PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
740 {
741   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
742   PetscInt                np;
743   PetscErrorCode          ierr;
744 
745   PetscFunctionBegin;
746   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
747   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
748   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
749   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
750   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
751   *partition = p->partition;
752   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
753   PetscFunctionReturn(0);
754 }
755 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerInitialize_Shell"
/* Install the shell implementation's function table on the object */
PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view      = PetscPartitionerView_Shell;
  part->ops->destroy   = PetscPartitionerDestroy_Shell;
  part->ops->partition = PetscPartitionerPartition_Shell;
  PetscFunctionReturn(0);
}
766 
767 /*MC
768   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
769 
770   Level: intermediate
771 
772 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
773 M*/
774 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerCreate_Shell"
/* Constructor for PETSCPARTITIONERSHELL: allocate the implementation data and
   install the shell function table */
PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
{
  PetscPartitioner_Shell *p;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
  part->data = p;

  ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
790 
791 #undef __FUNCT__
792 #define __FUNCT__ "PetscPartitionerShellSetPartition"
793 /*@C
794   PetscPartitionerShellSetPartition - Set an artifical partition for a mesh
795 
796   Collective on PART
797 
798   Input Parameters:
799 + part     - The PetscPartitioner
800 . numProcs - The number of partitions
801 . sizes    - array of size numProcs (or NULL) providing the number of points in each partition
802 - points   - array of size sum(sizes) (may be NULL iff sizes is NULL) providing the partition each point belongs to
803 
804   Level: developer
805 
806   Notes:
807 
808     It is safe to free the sizes and points arrays after use in this routine.
809 
810 .seealso DMPlexDistribute(), PetscPartitionerCreate()
811 @*/
812 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt numProcs, const PetscInt sizes[], const PetscInt points[])
813 {
814   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
815   PetscInt                proc, numPoints;
816   PetscErrorCode          ierr;
817 
818   PetscFunctionBegin;
819   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
820   if (sizes) {PetscValidPointer(sizes, 3);}
821   if (sizes) {PetscValidPointer(points, 4);}
822   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
823   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
824   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
825   ierr = PetscSectionSetChart(p->section, 0, numProcs);CHKERRQ(ierr);
826   if (sizes) {
827     for (proc = 0; proc < numProcs; ++proc) {
828       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
829     }
830   }
831   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
832   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
833   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
834   PetscFunctionReturn(0);
835 }
836 
837 #undef __FUNCT__
838 #define __FUNCT__ "PetscPartitionerDestroy_Simple"
839 PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
840 {
841   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
842   PetscErrorCode          ierr;
843 
844   PetscFunctionBegin;
845   ierr = PetscFree(p);CHKERRQ(ierr);
846   PetscFunctionReturn(0);
847 }
848 
849 #undef __FUNCT__
850 #define __FUNCT__ "PetscPartitionerView_Simple_Ascii"
851 PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
852 {
853   PetscViewerFormat format;
854   PetscErrorCode    ierr;
855 
856   PetscFunctionBegin;
857   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
858   ierr = PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");CHKERRQ(ierr);
859   PetscFunctionReturn(0);
860 }
861 
862 #undef __FUNCT__
863 #define __FUNCT__ "PetscPartitionerView_Simple"
864 PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
865 {
866   PetscBool      iascii;
867   PetscErrorCode ierr;
868 
869   PetscFunctionBegin;
870   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
871   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
872   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
873   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
874   PetscFunctionReturn(0);
875 }
876 
877 #undef __FUNCT__
878 #define __FUNCT__ "PetscPartitionerPartition_Simple"
879 PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
880 {
881   MPI_Comm       comm;
882   PetscInt       np;
883   PetscMPIInt    size;
884   PetscErrorCode ierr;
885 
886   PetscFunctionBegin;
887   comm = PetscObjectComm((PetscObject)dm);
888   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
889   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
890   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
891   if (size == 1) {
892     for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
893   }
894   else {
895     PetscMPIInt rank;
896     PetscInt nvGlobal, *offsets, myFirst, myLast;
897 
898     ierr = PetscMalloc1(size+1,&offsets);
899     offsets[0] = 0;
900     ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
901     for (np = 2; np <= size; np++) {
902       offsets[np] += offsets[np-1];
903     }
904     nvGlobal = offsets[size];
905     ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
906     myFirst = offsets[rank];
907     myLast  = offsets[rank + 1] - 1;
908     ierr = PetscFree(offsets);CHKERRQ(ierr);
909     if (numVertices) {
910       PetscInt firstPart = 0, firstLargePart = 0;
911       PetscInt lastPart = 0, lastLargePart = 0;
912       PetscInt rem = nvGlobal % nparts;
913       PetscInt pSmall = nvGlobal/nparts;
914       PetscInt pBig = nvGlobal/nparts + 1;
915 
916 
917       if (rem) {
918         firstLargePart = myFirst / pBig;
919         lastLargePart  = myLast  / pBig;
920 
921         if (firstLargePart < rem) {
922           firstPart = firstLargePart;
923         }
924         else {
925           firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
926         }
927         if (lastLargePart < rem) {
928           lastPart = lastLargePart;
929         }
930         else {
931           lastPart = rem + (myLast - (rem * pBig)) / pSmall;
932         }
933       }
934       else {
935         firstPart = myFirst / (nvGlobal/nparts);
936         lastPart  = myLast  / (nvGlobal/nparts);
937       }
938 
939       for (np = firstPart; np <= lastPart; np++) {
940         PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
941         PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);
942 
943         PartStart = PetscMax(PartStart,myFirst);
944         PartEnd   = PetscMin(PartEnd,myLast+1);
945         ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
946       }
947     }
948   }
949   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
950   PetscFunctionReturn(0);
951 }
952 
953 #undef __FUNCT__
954 #define __FUNCT__ "PetscPartitionerInitialize_Simple"
955 PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
956 {
957   PetscFunctionBegin;
958   part->ops->view      = PetscPartitionerView_Simple;
959   part->ops->destroy   = PetscPartitionerDestroy_Simple;
960   part->ops->partition = PetscPartitionerPartition_Simple;
961   PetscFunctionReturn(0);
962 }
963 
/*MC
  PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object that assigns contiguous, evenly sized blocks of consecutive vertices to each partition

  Level: intermediate

.seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
M*/
971 
972 #undef __FUNCT__
973 #define __FUNCT__ "PetscPartitionerCreate_Simple"
974 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
975 {
976   PetscPartitioner_Simple *p;
977   PetscErrorCode           ierr;
978 
979   PetscFunctionBegin;
980   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
981   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
982   part->data = p;
983 
984   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
985   PetscFunctionReturn(0);
986 }
987 
988 #undef __FUNCT__
989 #define __FUNCT__ "PetscPartitionerDestroy_Chaco"
990 PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
991 {
992   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
993   PetscErrorCode          ierr;
994 
995   PetscFunctionBegin;
996   ierr = PetscFree(p);CHKERRQ(ierr);
997   PetscFunctionReturn(0);
998 }
999 
1000 #undef __FUNCT__
1001 #define __FUNCT__ "PetscPartitionerView_Chaco_Ascii"
1002 PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1003 {
1004   PetscViewerFormat format;
1005   PetscErrorCode    ierr;
1006 
1007   PetscFunctionBegin;
1008   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1009   ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
1010   PetscFunctionReturn(0);
1011 }
1012 
1013 #undef __FUNCT__
1014 #define __FUNCT__ "PetscPartitionerView_Chaco"
1015 PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1016 {
1017   PetscBool      iascii;
1018   PetscErrorCode ierr;
1019 
1020   PetscFunctionBegin;
1021   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1022   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1023   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1024   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1025   PetscFunctionReturn(0);
1026 }
1027 
1028 #if defined(PETSC_HAVE_CHACO)
1029 #if defined(PETSC_HAVE_UNISTD_H)
1030 #include <unistd.h>
1031 #endif
1032 /* Chaco does not have an include file */
1033 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1034                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1035                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1036                        int mesh_dims[3], double *goal, int global_method, int local_method,
1037                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1038 
1039 extern int FREE_GRAPH;
1040 #endif
1041 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerPartition_Chaco"
/* Partition the local adjacency graph with the Chaco library, then convert
   Chaco's per-vertex assignment into a PetscSection of partition sizes and an
   IS listing vertices grouped by partition.  Chaco prints to stdout, so stdout
   is temporarily redirected into a pipe and replayed only on error. */
PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
  short int     *assignment;              /* Output partition */
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  /* Nothing to partition locally: produce an empty section and empty IS */
  if (!numVertices) {
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco uses 1-based vertex numbers: shift adjacency in place (undone below) */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  /* architecture == 1, so lay the processors out as an nparts x 1 x 1 mesh */
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  /* NOTE(review): interface() returns a plain int that is stored in ierr and
     only checked below when PETSC_HAVE_UNISTD_H is defined */
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    /* Restore stdout and capture whatever Chaco printed, for the error message */
    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* Group vertex numbers by partition; O(nparts*nvtxs) scan */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the in-place 1-based shift applied above */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1147 
1148 #undef __FUNCT__
1149 #define __FUNCT__ "PetscPartitionerInitialize_Chaco"
1150 PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1151 {
1152   PetscFunctionBegin;
1153   part->ops->view      = PetscPartitionerView_Chaco;
1154   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1155   part->ops->partition = PetscPartitionerPartition_Chaco;
1156   PetscFunctionReturn(0);
1157 }
1158 
1159 /*MC
1160   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1161 
1162   Level: intermediate
1163 
1164 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1165 M*/
1166 
1167 #undef __FUNCT__
1168 #define __FUNCT__ "PetscPartitionerCreate_Chaco"
1169 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1170 {
1171   PetscPartitioner_Chaco *p;
1172   PetscErrorCode          ierr;
1173 
1174   PetscFunctionBegin;
1175   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1176   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1177   part->data = p;
1178 
1179   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1180   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1181   PetscFunctionReturn(0);
1182 }
1183 
1184 #undef __FUNCT__
1185 #define __FUNCT__ "PetscPartitionerDestroy_ParMetis"
1186 PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1187 {
1188   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1189   PetscErrorCode             ierr;
1190 
1191   PetscFunctionBegin;
1192   ierr = PetscFree(p);CHKERRQ(ierr);
1193   PetscFunctionReturn(0);
1194 }
1195 
1196 #undef __FUNCT__
1197 #define __FUNCT__ "PetscPartitionerView_ParMetis_Ascii"
1198 PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1199 {
1200   PetscViewerFormat format;
1201   PetscErrorCode    ierr;
1202 
1203   PetscFunctionBegin;
1204   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1205   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
1206   PetscFunctionReturn(0);
1207 }
1208 
1209 #undef __FUNCT__
1210 #define __FUNCT__ "PetscPartitionerView_ParMetis"
1211 PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1212 {
1213   PetscBool      iascii;
1214   PetscErrorCode ierr;
1215 
1216   PetscFunctionBegin;
1217   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1218   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1219   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1220   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1221   PetscFunctionReturn(0);
1222 }
1223 
1224 #if defined(PETSC_HAVE_PARMETIS)
1225 #include <parmetis.h>
1226 #endif
1227 
1228 #undef __FUNCT__
1229 #define __FUNCT__ "PetscPartitionerPartition_ParMetis"
1230 PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1231 {
1232 #if defined(PETSC_HAVE_PARMETIS)
1233   MPI_Comm       comm;
1234   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1235   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1236   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1237   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1238   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1239   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1240   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1241   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1242   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1243   PetscReal     *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1244   PetscReal     *ubvec;                    /* The balance intolerance for vertex weights */
1245   PetscInt       options[5];               /* Options */
1246   /* Outputs */
1247   PetscInt       edgeCut;                  /* The number of edges cut by the partition */
1248   PetscInt      *assignment, *points;
1249   PetscMPIInt    rank, p, v, i;
1250   PetscErrorCode ierr;
1251 
1252   PetscFunctionBegin;
1253   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1254   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1255   options[0] = 0; /* Use all defaults */
1256   /* Calculate vertex distribution */
1257   ierr = PetscMalloc4(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment);CHKERRQ(ierr);
1258   vtxdist[0] = 0;
1259   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1260   for (p = 2; p <= nparts; ++p) {
1261     vtxdist[p] += vtxdist[p-1];
1262   }
1263   /* Calculate weights */
1264   for (p = 0; p < nparts; ++p) {
1265     tpwgts[p] = 1.0/nparts;
1266   }
1267   ubvec[0] = 1.05;
1268 
1269   if (nparts == 1) {
1270     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));
1271   } else {
1272     if (vtxdist[1] == vtxdist[nparts]) {
1273       if (!rank) {
1274         PetscStackPush("METIS_PartGraphKway");
1275         ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
1276         PetscStackPop;
1277         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1278       }
1279     } else {
1280       PetscStackPush("ParMETIS_V3_PartKway");
1281       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1282       PetscStackPop;
1283       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
1284     }
1285   }
1286   /* Convert to PetscSection+IS */
1287   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1288   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1289   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1290   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1291   for (p = 0, i = 0; p < nparts; ++p) {
1292     for (v = 0; v < nvtxs; ++v) {
1293       if (assignment[v] == p) points[i++] = v;
1294     }
1295   }
1296   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1297   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1298   ierr = PetscFree4(vtxdist,tpwgts,ubvec,assignment);CHKERRQ(ierr);
1299   PetscFunctionReturn(0);
1300 #else
1301   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1302 #endif
1303 }
1304 
1305 #undef __FUNCT__
1306 #define __FUNCT__ "PetscPartitionerInitialize_ParMetis"
1307 PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1308 {
1309   PetscFunctionBegin;
1310   part->ops->view      = PetscPartitionerView_ParMetis;
1311   part->ops->destroy   = PetscPartitionerDestroy_ParMetis;
1312   part->ops->partition = PetscPartitionerPartition_ParMetis;
1313   PetscFunctionReturn(0);
1314 }
1315 
1316 /*MC
1317   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1318 
1319   Level: intermediate
1320 
1321 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1322 M*/
1323 
1324 #undef __FUNCT__
1325 #define __FUNCT__ "PetscPartitionerCreate_ParMetis"
1326 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1327 {
1328   PetscPartitioner_ParMetis *p;
1329   PetscErrorCode          ierr;
1330 
1331   PetscFunctionBegin;
1332   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1333   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1334   part->data = p;
1335 
1336   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1337   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1338   PetscFunctionReturn(0);
1339 }
1340 
1341 #undef __FUNCT__
1342 #define __FUNCT__ "DMPlexGetPartitioner"
1343 /*@
1344   DMPlexGetPartitioner - Get the mesh partitioner
1345 
1346   Not collective
1347 
1348   Input Parameter:
1349 . dm - The DM
1350 
1351   Output Parameter:
1352 . part - The PetscPartitioner
1353 
1354   Level: developer
1355 
1356   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1357 
1358 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1359 @*/
1360 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1361 {
1362   DM_Plex *mesh = (DM_Plex *) dm->data;
1363 
1364   PetscFunctionBegin;
1365   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1366   PetscValidPointer(part, 2);
1367   *part = mesh->partitioner;
1368   PetscFunctionReturn(0);
1369 }
1370 
1371 #undef __FUNCT__
1372 #define __FUNCT__ "DMPlexSetPartitioner"
1373 /*@
1374   DMPlexSetPartitioner - Set the mesh partitioner
1375 
1376   logically collective on dm and part
1377 
1378   Input Parameters:
1379 + dm - The DM
1380 - part - The partitioner
1381 
1382   Level: developer
1383 
1384   Note: Any existing PetscPartitioner will be destroyed.
1385 
1386 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1387 @*/
1388 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1389 {
1390   DM_Plex       *mesh = (DM_Plex *) dm->data;
1391   PetscErrorCode ierr;
1392 
1393   PetscFunctionBegin;
1394   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1395   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1396   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1397   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1398   mesh->partitioner = part;
1399   PetscFunctionReturn(0);
1400 }
1401 
1402 #undef __FUNCT__
1403 #define __FUNCT__ "DMPlexPartitionLabelClosure_Tree"
1404 static PetscErrorCode DMPlexPartitionLabelClosure_Tree(DM dm, DMLabel label, PetscInt rank, PetscInt point, PetscBool up, PetscBool down)
1405 {
1406   PetscErrorCode ierr;
1407 
1408   PetscFunctionBegin;
1409   if (up) {
1410     PetscInt parent;
1411 
1412     ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
1413     if (parent != point) {
1414       PetscInt closureSize, *closure = NULL, i;
1415 
1416       ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
1417       for (i = 0; i < closureSize; i++) {
1418         PetscInt cpoint = closure[2*i];
1419 
1420         ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
1421         ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
1422       }
1423       ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
1424     }
1425   }
1426   if (down) {
1427     PetscInt numChildren;
1428     const PetscInt *children;
1429 
1430     ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
1431     if (numChildren) {
1432       PetscInt i;
1433 
1434       for (i = 0; i < numChildren; i++) {
1435         PetscInt cpoint = children[i];
1436 
1437         ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
1438         ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
1439       }
1440     }
1441   }
1442   PetscFunctionReturn(0);
1443 }
1444 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelClosure"
/*@
  DMPlexPartitionLabelClosure - Add the closure of all points to the partition label

  Input Parameters:
+ dm     - The DM
- label  - DMLabel assigning ranks to remote roots

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, c, closureSize;
  PetscInt       *closure = NULL;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* Visit every rank currently present as a label value */
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];

    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* closure stores two entries per point, hence the stride-2 walk */
      ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      for (c = 0; c < closureSize*2; c += 2) {
        ierr = DMLabelSetValue(label, closure[c], rank);CHKERRQ(ierr);
        /* Also propagate the rank through the point tree in both directions */
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,closure[c],PETSC_TRUE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  /* The closure work array is reused across iterations; return it once here */
  if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);}
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1491 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelAdjacency"
/*@
  DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label

  Input Parameters:
+ dm     - The DM
- label  - DMLabel assigning ranks to remote roots

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, a, adjSize;
  PetscInt       *adj = NULL;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* Visit every rank currently present as a label value */
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];

    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* PETSC_DETERMINE lets DMPlexGetAdjacency size (and reuse) adj for us */
      adjSize = PETSC_DETERMINE;
      ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
      for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  /* adj was allocated by DMPlexGetAdjacency and reused; free it once here */
  ierr = PetscFree(adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1536 
1537 #undef __FUNCT__
1538 #define __FUNCT__ "DMPlexPartitionLabelInvert"
1539 /*@
1540   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
1541 
1542   Input Parameters:
1543 + dm        - The DM
1544 . rootLabel - DMLabel assinging ranks to local roots
1545 . processSF - A star forest mapping into the local index on each remote rank
1546 
1547   Output Parameter:
1548 - leafLabel - DMLabel assinging ranks to remote roots
1549 
1550   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
1551   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
1552 
1553   Level: developer
1554 
1555 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1556 @*/
1557 PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
1558 {
1559   MPI_Comm           comm;
1560   PetscMPIInt        rank, numProcs;
1561   PetscInt           p, n, numNeighbors, size, l, nleaves;
1562   PetscSF            sfPoint;
1563   PetscSFNode       *rootPoints, *leafPoints;
1564   PetscSection       rootSection, leafSection;
1565   const PetscSFNode *remote;
1566   const PetscInt    *local, *neighbors;
1567   IS                 valueIS;
1568   PetscErrorCode     ierr;
1569 
1570   PetscFunctionBegin;
1571   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1572   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1573   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1574   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1575 
1576   /* Convert to (point, rank) and use actual owners */
1577   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
1578   ierr = PetscSectionSetChart(rootSection, 0, numProcs);CHKERRQ(ierr);
1579   ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
1580   ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
1581   ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
1582   for (n = 0; n < numNeighbors; ++n) {
1583     PetscInt numPoints;
1584 
1585     ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
1586     ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
1587   }
1588   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
1589   ierr = PetscSectionGetStorageSize(rootSection, &size);CHKERRQ(ierr);
1590   ierr = PetscMalloc1(size, &rootPoints);CHKERRQ(ierr);
1591   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
1592   for (n = 0; n < numNeighbors; ++n) {
1593     IS              pointIS;
1594     const PetscInt *points;
1595     PetscInt        off, numPoints, p;
1596 
1597     ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
1598     ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
1599     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1600     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1601     for (p = 0; p < numPoints; ++p) {
1602       if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
1603       else       {l = -1;}
1604       if (l >= 0) {rootPoints[off+p] = remote[l];}
1605       else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
1606     }
1607     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1608     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1609   }
1610   ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
1611   ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
1612   /* Communicate overlap */
1613   ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
1614   ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
1615   /* Filter remote contributions (ovLeafPoints) into the overlapSF */
1616   ierr = PetscSectionGetStorageSize(leafSection, &size);CHKERRQ(ierr);
1617   for (p = 0; p < size; p++) {
1618     ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
1619   }
1620   ierr = PetscFree(rootPoints);CHKERRQ(ierr);
1621   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
1622   ierr = PetscFree(leafPoints);CHKERRQ(ierr);
1623   ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
1624   PetscFunctionReturn(0);
1625 }
1626 
1627 #undef __FUNCT__
1628 #define __FUNCT__ "DMPlexPartitionLabelCreateSF"
1629 /*@
1630   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
1631 
1632   Input Parameters:
1633 + dm    - The DM
1634 . label - DMLabel assinging ranks to remote roots
1635 
1636   Output Parameter:
1637 - sf    - The star forest communication context encapsulating the defined mapping
1638 
1639   Note: The incoming label is a receiver mapping of remote points to their parent rank.
1640 
1641   Level: developer
1642 
1643 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
1644 @*/
1645 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
1646 {
1647   PetscMPIInt     rank, numProcs;
1648   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
1649   PetscSFNode    *remotePoints;
1650   IS              remoteRootIS;
1651   const PetscInt *remoteRoots;
1652   PetscErrorCode ierr;
1653 
1654   PetscFunctionBegin;
1655   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1656   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
1657 
1658   for (numRemote = 0, n = 0; n < numProcs; ++n) {
1659     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1660     numRemote += numPoints;
1661   }
1662   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
1663   /* Put owned points first */
1664   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
1665   if (numPoints > 0) {
1666     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
1667     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1668     for (p = 0; p < numPoints; p++) {
1669       remotePoints[idx].index = remoteRoots[p];
1670       remotePoints[idx].rank = rank;
1671       idx++;
1672     }
1673     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1674     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1675   }
1676   /* Now add remote points */
1677   for (n = 0; n < numProcs; ++n) {
1678     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1679     if (numPoints <= 0 || n == rank) continue;
1680     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
1681     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1682     for (p = 0; p < numPoints; p++) {
1683       remotePoints[idx].index = remoteRoots[p];
1684       remotePoints[idx].rank = n;
1685       idx++;
1686     }
1687     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1688     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1689   }
1690   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
1691   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1692   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1693   PetscFunctionReturn(0);
1694 }
1695