xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 2b8d69ca7ea5fe9190df62c1dce3bbd66fce84dd)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
3 PetscClassId PETSCPARTITIONER_CLASSID = 0;
4 
5 PetscFunctionList PetscPartitionerList              = NULL;
6 PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
7 
8 PetscBool ChacoPartitionercite = PETSC_FALSE;
9 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
10                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
11                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
12                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
13                                "  isbn      = {0-89791-816-9},\n"
14                                "  pages     = {28},\n"
15                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
16                                "  publisher = {ACM Press},\n"
17                                "  address   = {New York},\n"
18                                "  year      = {1995}\n}\n";
19 
/* Citation flag and BibTeX entry for the ParMetis partitioner; presumably passed to
   PetscCitationsRegister() by the ParMetis implementation — confirm at the call site. */
PetscBool ParMetisPartitionercite = PETSC_FALSE;
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";
28 
29 #undef __FUNCT__
30 #define __FUNCT__ "DMPlexCreatePartitionerGraph"
31 /*@C
32   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
33 
34   Input Parameters:
35 + dm      - The mesh DM dm
36 - height  - Height of the strata from which to construct the graph
37 
38   Output Parameter:
39 + numVertices - Number of vertices in the graph
40 - offsets     - Point offsets in the graph
41 - adjacency   - Point connectivity in the graph
42 
43   The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
44   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
45   representation on the mesh.
46 
47   Level: developer
48 
49 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
50 @*/
51 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
52 {
53   PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
54   PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
55   IS             cellNumbering;
56   const PetscInt *cellNum;
57   PetscBool      useCone, useClosure;
58   PetscSection   section;
59   PetscSegBuffer adjBuffer;
60   PetscSF        sfPoint;
61   PetscMPIInt    rank;
62   PetscErrorCode ierr;
63 
64   PetscFunctionBegin;
65   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
66   ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
67   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
68   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
69   /* Build adjacency graph via a section/segbuffer */
70   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
71   ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
72   ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
73   /* Always use FVM adjacency to create partitioner graph */
74   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
75   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
76   ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
77   ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
78   if (nroots > 0) {
79     ierr = DMPlexGetCellNumbering(dm, &cellNumbering);CHKERRQ(ierr);
80     ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
81   }
82   for (*numVertices = 0, p = pStart; p < pEnd; p++) {
83     /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
84     if (nroots > 0) {if (cellNum[p] < 0) continue;}
85     adjSize = PETSC_DETERMINE;
86     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
87     for (a = 0; a < adjSize; ++a) {
88       const PetscInt point = adj[a];
89       if (point != p && pStart <= point && point < pEnd) {
90         PetscInt *PETSC_RESTRICT pBuf;
91         ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
92         ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
93         *pBuf = point;
94       }
95     }
96     (*numVertices)++;
97   }
98   ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
99   ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
100   /* Derive CSR graph from section/segbuffer */
101   ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
102   ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
103   ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
104   for (idx = 0, p = pStart; p < pEnd; p++) {
105     if (nroots > 0) {if (cellNum[p] < 0) continue;}
106     ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
107   }
108   vOffsets[*numVertices] = size;
109   if (offsets) *offsets = vOffsets;
110   ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
111   if (nroots > 0) {
112     ISLocalToGlobalMapping ltogCells;
113     PetscInt n, size, *cells_arr;
114     /* In parallel, apply a global cell numbering to the graph */
115     ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
116     ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
117     ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
118     ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
119     /* Convert to positive global cell numbers */
120     for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
121     ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
122     ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);CHKERRQ(ierr);
123     ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
124   }
125   if (adjacency) *adjacency = graph;
126   /* Clean up */
127   ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
128   ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
129   ierr = PetscFree(adj);CHKERRQ(ierr);
130   PetscFunctionReturn(0);
131 }
132 
133 #undef __FUNCT__
134 #define __FUNCT__ "DMPlexCreateNeighborCSR"
135 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
136 {
137   const PetscInt maxFaceCases = 30;
138   PetscInt       numFaceCases = 0;
139   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
140   PetscInt      *off, *adj;
141   PetscInt      *neighborCells = NULL;
142   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
143   PetscErrorCode ierr;
144 
145   PetscFunctionBegin;
146   /* For parallel partitioning, I think you have to communicate supports */
147   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
148   cellDim = dim - cellHeight;
149   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
150   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
151   if (cEnd - cStart == 0) {
152     if (numVertices) *numVertices = 0;
153     if (offsets)   *offsets   = NULL;
154     if (adjacency) *adjacency = NULL;
155     PetscFunctionReturn(0);
156   }
157   numCells  = cEnd - cStart;
158   faceDepth = depth - cellHeight;
159   if (dim == depth) {
160     PetscInt f, fStart, fEnd;
161 
162     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
163     /* Count neighboring cells */
164     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
165     for (f = fStart; f < fEnd; ++f) {
166       const PetscInt *support;
167       PetscInt        supportSize;
168       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
169       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
170       if (supportSize == 2) {
171         PetscInt numChildren;
172 
173         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
174         if (!numChildren) {
175           ++off[support[0]-cStart+1];
176           ++off[support[1]-cStart+1];
177         }
178       }
179     }
180     /* Prefix sum */
181     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
182     if (adjacency) {
183       PetscInt *tmp;
184 
185       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
186       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
187       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
188       /* Get neighboring cells */
189       for (f = fStart; f < fEnd; ++f) {
190         const PetscInt *support;
191         PetscInt        supportSize;
192         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
193         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
194         if (supportSize == 2) {
195           PetscInt numChildren;
196 
197           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
198           if (!numChildren) {
199             adj[tmp[support[0]-cStart]++] = support[1];
200             adj[tmp[support[1]-cStart]++] = support[0];
201           }
202         }
203       }
204 #if defined(PETSC_USE_DEBUG)
205       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
206 #endif
207       ierr = PetscFree(tmp);CHKERRQ(ierr);
208     }
209     if (numVertices) *numVertices = numCells;
210     if (offsets)   *offsets   = off;
211     if (adjacency) *adjacency = adj;
212     PetscFunctionReturn(0);
213   }
214   /* Setup face recognition */
215   if (faceDepth == 1) {
216     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
217 
218     for (c = cStart; c < cEnd; ++c) {
219       PetscInt corners;
220 
221       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
222       if (!cornersSeen[corners]) {
223         PetscInt nFV;
224 
225         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
226         cornersSeen[corners] = 1;
227 
228         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
229 
230         numFaceVertices[numFaceCases++] = nFV;
231       }
232     }
233   }
234   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
235   /* Count neighboring cells */
236   for (cell = cStart; cell < cEnd; ++cell) {
237     PetscInt numNeighbors = PETSC_DETERMINE, n;
238 
239     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
240     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
241     for (n = 0; n < numNeighbors; ++n) {
242       PetscInt        cellPair[2];
243       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
244       PetscInt        meetSize = 0;
245       const PetscInt *meet    = NULL;
246 
247       cellPair[0] = cell; cellPair[1] = neighborCells[n];
248       if (cellPair[0] == cellPair[1]) continue;
249       if (!found) {
250         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
251         if (meetSize) {
252           PetscInt f;
253 
254           for (f = 0; f < numFaceCases; ++f) {
255             if (numFaceVertices[f] == meetSize) {
256               found = PETSC_TRUE;
257               break;
258             }
259           }
260         }
261         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
262       }
263       if (found) ++off[cell-cStart+1];
264     }
265   }
266   /* Prefix sum */
267   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
268 
269   if (adjacency) {
270     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
271     /* Get neighboring cells */
272     for (cell = cStart; cell < cEnd; ++cell) {
273       PetscInt numNeighbors = PETSC_DETERMINE, n;
274       PetscInt cellOffset   = 0;
275 
276       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
277       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
278       for (n = 0; n < numNeighbors; ++n) {
279         PetscInt        cellPair[2];
280         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
281         PetscInt        meetSize = 0;
282         const PetscInt *meet    = NULL;
283 
284         cellPair[0] = cell; cellPair[1] = neighborCells[n];
285         if (cellPair[0] == cellPair[1]) continue;
286         if (!found) {
287           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
288           if (meetSize) {
289             PetscInt f;
290 
291             for (f = 0; f < numFaceCases; ++f) {
292               if (numFaceVertices[f] == meetSize) {
293                 found = PETSC_TRUE;
294                 break;
295               }
296             }
297           }
298           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
299         }
300         if (found) {
301           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
302           ++cellOffset;
303         }
304       }
305     }
306   }
307   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
308   if (numVertices) *numVertices = numCells;
309   if (offsets)   *offsets   = off;
310   if (adjacency) *adjacency = adj;
311   PetscFunctionReturn(0);
312 }
313 
314 #undef __FUNCT__
315 #define __FUNCT__ "PetscPartitionerRegister"
316 /*@C
317   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
318 
319   Not Collective
320 
321   Input Parameters:
322 + name        - The name of a new user-defined creation routine
323 - create_func - The creation routine itself
324 
325   Notes:
326   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
327 
328   Sample usage:
329 .vb
330     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
331 .ve
332 
333   Then, your PetscPartitioner type can be chosen with the procedural interface via
334 .vb
335     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
336     PetscPartitionerSetType(PetscPartitioner, "my_part");
337 .ve
338    or at runtime via the option
339 .vb
340     -petscpartitioner_type my_part
341 .ve
342 
343   Level: advanced
344 
345 .keywords: PetscPartitioner, register
346 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
347 
348 @*/
349 PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
350 {
351   PetscErrorCode ierr;
352 
353   PetscFunctionBegin;
354   ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
355   PetscFunctionReturn(0);
356 }
357 
358 #undef __FUNCT__
359 #define __FUNCT__ "PetscPartitionerSetType"
360 /*@C
361   PetscPartitionerSetType - Builds a particular PetscPartitioner
362 
363   Collective on PetscPartitioner
364 
365   Input Parameters:
366 + part - The PetscPartitioner object
367 - name - The kind of partitioner
368 
369   Options Database Key:
370 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
371 
372   Level: intermediate
373 
374 .keywords: PetscPartitioner, set, type
375 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
376 @*/
377 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
378 {
379   PetscErrorCode (*r)(PetscPartitioner);
380   PetscBool      match;
381   PetscErrorCode ierr;
382 
383   PetscFunctionBegin;
384   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
385   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
386   if (match) PetscFunctionReturn(0);
387 
388   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
389   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
390   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
391 
392   if (part->ops->destroy) {
393     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
394     part->ops->destroy = NULL;
395   }
396   ierr = (*r)(part);CHKERRQ(ierr);
397   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
398   PetscFunctionReturn(0);
399 }
400 
401 #undef __FUNCT__
402 #define __FUNCT__ "PetscPartitionerGetType"
403 /*@C
404   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
405 
406   Not Collective
407 
408   Input Parameter:
409 . part - The PetscPartitioner
410 
411   Output Parameter:
412 . name - The PetscPartitioner type name
413 
414   Level: intermediate
415 
416 .keywords: PetscPartitioner, get, type, name
417 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
418 @*/
419 PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
420 {
421   PetscErrorCode ierr;
422 
423   PetscFunctionBegin;
424   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
425   PetscValidPointer(name, 2);
426   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
427   *name = ((PetscObject) part)->type_name;
428   PetscFunctionReturn(0);
429 }
430 
431 #undef __FUNCT__
432 #define __FUNCT__ "PetscPartitionerView"
433 /*@C
434   PetscPartitionerView - Views a PetscPartitioner
435 
436   Collective on PetscPartitioner
437 
438   Input Parameter:
439 + part - the PetscPartitioner object to view
440 - v    - the viewer
441 
442   Level: developer
443 
444 .seealso: PetscPartitionerDestroy()
445 @*/
446 PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
447 {
448   PetscErrorCode ierr;
449 
450   PetscFunctionBegin;
451   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
452   if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
453   if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
454   PetscFunctionReturn(0);
455 }
456 
457 #undef __FUNCT__
458 #define __FUNCT__ "PetscPartitionerSetTypeFromOptions_Internal"
459 PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
460 {
461   const char    *defaultType;
462   char           name[256];
463   PetscBool      flg;
464   PetscErrorCode ierr;
465 
466   PetscFunctionBegin;
467   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
468   if (!((PetscObject) part)->type_name) defaultType = PETSCPARTITIONERCHACO;
469   else                                  defaultType = ((PetscObject) part)->type_name;
470   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
471 
472   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
473   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);CHKERRQ(ierr);
474   if (flg) {
475     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
476   } else if (!((PetscObject) part)->type_name) {
477     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
478   }
479   ierr = PetscOptionsEnd();CHKERRQ(ierr);
480   PetscFunctionReturn(0);
481 }
482 
483 #undef __FUNCT__
484 #define __FUNCT__ "PetscPartitionerSetFromOptions"
485 /*@
486   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
487 
488   Collective on PetscPartitioner
489 
490   Input Parameter:
491 . part - the PetscPartitioner object to set options for
492 
493   Level: developer
494 
495 .seealso: PetscPartitionerView()
496 @*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Resolve the type first so the type-specific setfromoptions hook below exists */
  ierr = PetscPartitionerSetTypeFromOptions_Internal(part);CHKERRQ(ierr);

  /* NOTE(review): PetscObjectOptionsBegin is a macro that brings PetscOptionsObject
     into scope for the handlers call below — do not reorder these statements */
  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  /* Delegate to the implementation's own option handling, if any */
  if (part->ops->setfromoptions) {ierr = (*part->ops->setfromoptions)(part);CHKERRQ(ierr);}
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  /* Honor -petscpartitioner_view after options are fully processed */
  ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
513 
514 #undef __FUNCT__
515 #define __FUNCT__ "PetscPartitionerSetUp"
516 /*@C
517   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
518 
519   Collective on PetscPartitioner
520 
521   Input Parameter:
522 . part - the PetscPartitioner object to setup
523 
524   Level: developer
525 
526 .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
527 @*/
528 PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
529 {
530   PetscErrorCode ierr;
531 
532   PetscFunctionBegin;
533   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
534   if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
535   PetscFunctionReturn(0);
536 }
537 
538 #undef __FUNCT__
539 #define __FUNCT__ "PetscPartitionerDestroy"
540 /*@
541   PetscPartitionerDestroy - Destroys a PetscPartitioner object
542 
543   Collective on PetscPartitioner
544 
545   Input Parameter:
546 . part - the PetscPartitioner object to destroy
547 
548   Level: developer
549 
550 .seealso: PetscPartitionerView()
551 @*/
552 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
553 {
554   PetscErrorCode ierr;
555 
556   PetscFunctionBegin;
557   if (!*part) PetscFunctionReturn(0);
558   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
559 
560   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
561   ((PetscObject) (*part))->refct = 0;
562 
563   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
564   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
565   PetscFunctionReturn(0);
566 }
567 
568 #undef __FUNCT__
569 #define __FUNCT__ "PetscPartitionerCreate"
570 /*@
571   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
572 
573   Collective on MPI_Comm
574 
575   Input Parameter:
576 . comm - The communicator for the PetscPartitioner object
577 
578   Output Parameter:
579 . part - The PetscPartitioner object
580 
581   Level: beginner
582 
583 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
584 @*/
585 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
586 {
587   PetscPartitioner p;
588   PetscErrorCode   ierr;
589 
590   PetscFunctionBegin;
591   PetscValidPointer(part, 2);
592   *part = NULL;
593   ierr = PetscFVInitializePackage();CHKERRQ(ierr);
594 
595   ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
596 
597   *part = p;
598   PetscFunctionReturn(0);
599 }
600 
601 #undef __FUNCT__
602 #define __FUNCT__ "PetscPartitionerPartition"
603 /*@
604   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
605 
606   Collective on DM
607 
608   Input Parameters:
609 + part    - The PetscPartitioner
610 - dm      - The mesh DM
611 
612   Output Parameters:
613 + partSection     - The PetscSection giving the division of points by partition
614 - partition       - The list of points by partition
615 
616   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
617 
618   Level: developer
619 
620 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
621 @*/
622 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
623 {
624   PetscMPIInt    size;
625   PetscErrorCode ierr;
626 
627   PetscFunctionBegin;
628   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
629   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
630   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
631   PetscValidPointer(partition, 5);
632   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
633   if (size == 1) {
634     PetscInt *points;
635     PetscInt  cStart, cEnd, c;
636 
637     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
638     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
639     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
640     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
641     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
642     for (c = cStart; c < cEnd; ++c) points[c] = c;
643     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
644     PetscFunctionReturn(0);
645   }
646   if (part->height == 0) {
647     PetscInt  numVertices;
648     PetscInt *start     = NULL;
649     PetscInt *adjacency = NULL;
650 
651     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency);CHKERRQ(ierr);
652     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
653     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
654     ierr = PetscFree(start);CHKERRQ(ierr);
655     ierr = PetscFree(adjacency);CHKERRQ(ierr);
656   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
657   PetscFunctionReturn(0);
658 }
659 
660 #undef __FUNCT__
661 #define __FUNCT__ "PetscPartitionerDestroy_Shell"
662 PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
663 {
664   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
665   PetscErrorCode          ierr;
666 
667   PetscFunctionBegin;
668   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
669   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
670   ierr = PetscFree(p);CHKERRQ(ierr);
671   PetscFunctionReturn(0);
672 }
673 
674 #undef __FUNCT__
675 #define __FUNCT__ "PetscPartitionerView_Shell_Ascii"
676 PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
677 {
678   PetscViewerFormat format;
679   PetscErrorCode    ierr;
680 
681   PetscFunctionBegin;
682   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
683   ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
684   PetscFunctionReturn(0);
685 }
686 
687 #undef __FUNCT__
688 #define __FUNCT__ "PetscPartitionerView_Shell"
689 PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
690 {
691   PetscBool      iascii;
692   PetscErrorCode ierr;
693 
694   PetscFunctionBegin;
695   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
696   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
697   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
698   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
699   PetscFunctionReturn(0);
700 }
701 
702 #undef __FUNCT__
703 #define __FUNCT__ "PetscPartitionerPartition_Shell"
704 PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
705 {
706   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
707   PetscInt                np;
708   PetscErrorCode          ierr;
709 
710   PetscFunctionBegin;
711   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
712   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
713   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
714   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
715   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
716   *partition = p->partition;
717   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
718   PetscFunctionReturn(0);
719 }
720 
721 #undef __FUNCT__
722 #define __FUNCT__ "PetscPartitionerInitialize_Shell"
723 PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
724 {
725   PetscFunctionBegin;
726   part->ops->view      = PetscPartitionerView_Shell;
727   part->ops->destroy   = PetscPartitionerDestroy_Shell;
728   part->ops->partition = PetscPartitionerPartition_Shell;
729   PetscFunctionReturn(0);
730 }
731 
732 /*MC
733   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
734 
735   Level: intermediate
736 
737 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
738 M*/
739 
740 #undef __FUNCT__
741 #define __FUNCT__ "PetscPartitionerCreate_Shell"
742 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
743 {
744   PetscPartitioner_Shell *p;
745   PetscErrorCode          ierr;
746 
747   PetscFunctionBegin;
748   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
749   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
750   part->data = p;
751 
752   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
753   PetscFunctionReturn(0);
754 }
755 
756 #undef __FUNCT__
757 #define __FUNCT__ "PetscPartitionerShellSetPartition"
/*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh

  Collective on PART

  Input Parameters:
+ part     - The PetscPartitioner
. numProcs - The number of partitions
. sizes    - array of size numProcs (or NULL) providing the number of points in each partition
- points   - array of size sum(sizes) (may be NULL iff sizes is NULL) listing the points of each
             partition, concatenated in partition order (partition 0 first, then partition 1, ...)

  Level: developer

  Notes:

    It is safe to free the sizes and points arrays after use in this routine;
    both are copied into internal storage (a PetscSection and an IS).

.seealso DMPlexDistribute(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt numProcs, const PetscInt sizes[], const PetscInt points[])
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscInt                proc, numPoints;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  if (sizes) {PetscValidPointer(sizes, 3);}
  if (sizes) {PetscValidPointer(points, 4);}
  /* Discard any previously set partition before building the new one */
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(p->section, 0, numProcs);CHKERRQ(ierr);
  if (sizes) {
    for (proc = 0; proc < numProcs; ++proc) {
      ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
    }
  }
  ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
  /* The IS length is the total number of points, i.e. sum(sizes); 0 when sizes is NULL */
  ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
801 
802 #undef __FUNCT__
803 #define __FUNCT__ "PetscPartitionerDestroy_Simple"
804 PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
805 {
806   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
807   PetscErrorCode          ierr;
808 
809   PetscFunctionBegin;
810   ierr = PetscFree(p);CHKERRQ(ierr);
811   PetscFunctionReturn(0);
812 }
813 
814 #undef __FUNCT__
815 #define __FUNCT__ "PetscPartitionerView_Simple_Ascii"
816 PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
817 {
818   PetscViewerFormat format;
819   PetscErrorCode    ierr;
820 
821   PetscFunctionBegin;
822   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
823   ierr = PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");CHKERRQ(ierr);
824   PetscFunctionReturn(0);
825 }
826 
827 #undef __FUNCT__
828 #define __FUNCT__ "PetscPartitionerView_Simple"
829 PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
830 {
831   PetscBool      iascii;
832   PetscErrorCode ierr;
833 
834   PetscFunctionBegin;
835   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
836   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
837   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
838   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
839   PetscFunctionReturn(0);
840 }
841 
842 #undef __FUNCT__
843 #define __FUNCT__ "PetscPartitionerPartition_Simple"
844 PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
845 {
846   MPI_Comm       comm;
847   PetscInt       np;
848   PetscMPIInt    size;
849   PetscErrorCode ierr;
850 
851   PetscFunctionBegin;
852   comm = PetscObjectComm((PetscObject)dm);
853   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
854   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
855   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
856   if (size == 1) {
857     for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
858   }
859   else {
860     PetscMPIInt rank;
861     PetscInt nvGlobal, *offsets, myFirst, myLast;
862 
863     ierr = PetscMalloc1(size+1,&offsets);
864     offsets[0] = 0;
865     ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
866     for (np = 2; np <= size; np++) {
867       offsets[np] += offsets[np-1];
868     }
869     nvGlobal = offsets[size];
870     ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
871     myFirst = offsets[rank];
872     myLast  = offsets[rank + 1] - 1;
873     ierr = PetscFree(offsets);CHKERRQ(ierr);
874     if (numVertices) {
875       PetscInt firstPart = 0, firstLargePart = 0;
876       PetscInt lastPart = 0, lastLargePart = 0;
877       PetscInt rem = nvGlobal % nparts;
878       PetscInt pSmall = nvGlobal/nparts;
879       PetscInt pBig = nvGlobal/nparts + 1;
880 
881 
882       if (rem) {
883         firstLargePart = myFirst / pBig;
884         lastLargePart  = myLast  / pBig;
885 
886         if (firstLargePart < rem) {
887           firstPart = firstLargePart;
888         }
889         else {
890           firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
891         }
892         if (lastLargePart < rem) {
893           lastPart = lastLargePart;
894         }
895         else {
896           lastPart = rem + (myLast - (rem * pBig)) / pSmall;
897         }
898       }
899       else {
900         firstPart = myFirst / (nvGlobal/nparts);
901         lastPart  = myLast  / (nvGlobal/nparts);
902       }
903 
904       for (np = firstPart; np <= lastPart; np++) {
905         PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
906         PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);
907 
908         PartStart = PetscMax(PartStart,myFirst);
909         PartEnd   = PetscMin(PartEnd,myLast+1);
910         ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
911       }
912     }
913   }
914   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
915   PetscFunctionReturn(0);
916 }
917 
918 #undef __FUNCT__
919 #define __FUNCT__ "PetscPartitionerInitialize_Simple"
920 PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
921 {
922   PetscFunctionBegin;
923   part->ops->view      = PetscPartitionerView_Simple;
924   part->ops->destroy   = PetscPartitionerDestroy_Simple;
925   part->ops->partition = PetscPartitionerPartition_Simple;
926   PetscFunctionReturn(0);
927 }
928 
929 /*MC
930   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
931 
932   Level: intermediate
933 
934 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
935 M*/
936 
937 #undef __FUNCT__
938 #define __FUNCT__ "PetscPartitionerCreate_Simple"
939 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
940 {
941   PetscPartitioner_Simple *p;
942   PetscErrorCode           ierr;
943 
944   PetscFunctionBegin;
945   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
946   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
947   part->data = p;
948 
949   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
950   PetscFunctionReturn(0);
951 }
952 
953 #undef __FUNCT__
954 #define __FUNCT__ "PetscPartitionerDestroy_Gather"
955 PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
956 {
957   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
958   PetscErrorCode          ierr;
959 
960   PetscFunctionBegin;
961   ierr = PetscFree(p);CHKERRQ(ierr);
962   PetscFunctionReturn(0);
963 }
964 
965 #undef __FUNCT__
966 #define __FUNCT__ "PetscPartitionerView_Gather_Ascii"
967 PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
968 {
969   PetscViewerFormat format;
970   PetscErrorCode    ierr;
971 
972   PetscFunctionBegin;
973   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
974   ierr = PetscViewerASCIIPrintf(viewer, "Gather Graph Partitioner:\n");CHKERRQ(ierr);
975   PetscFunctionReturn(0);
976 }
977 
978 #undef __FUNCT__
979 #define __FUNCT__ "PetscPartitionerView_Gather"
980 PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
981 {
982   PetscBool      iascii;
983   PetscErrorCode ierr;
984 
985   PetscFunctionBegin;
986   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
987   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
988   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
989   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
990   PetscFunctionReturn(0);
991 }
992 
993 #undef __FUNCT__
994 #define __FUNCT__ "PetscPartitionerPartition_Gather"
995 PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
996 {
997   PetscInt       np;
998   PetscErrorCode ierr;
999 
1000   PetscFunctionBegin;
1001   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1002   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1003   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1004   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1005   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1006   PetscFunctionReturn(0);
1007 }
1008 
1009 #undef __FUNCT__
1010 #define __FUNCT__ "PetscPartitionerInitialize_Gather"
1011 PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1012 {
1013   PetscFunctionBegin;
1014   part->ops->view      = PetscPartitionerView_Gather;
1015   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1016   part->ops->partition = PetscPartitionerPartition_Gather;
1017   PetscFunctionReturn(0);
1018 }
1019 
1020 /*MC
1021   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1022 
1023   Level: intermediate
1024 
1025 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1026 M*/
1027 
1028 #undef __FUNCT__
1029 #define __FUNCT__ "PetscPartitionerCreate_Gather"
1030 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1031 {
1032   PetscPartitioner_Gather *p;
1033   PetscErrorCode           ierr;
1034 
1035   PetscFunctionBegin;
1036   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1037   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1038   part->data = p;
1039 
1040   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1041   PetscFunctionReturn(0);
1042 }
1043 
1044 
1045 #undef __FUNCT__
1046 #define __FUNCT__ "PetscPartitionerDestroy_Chaco"
1047 PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1048 {
1049   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1050   PetscErrorCode          ierr;
1051 
1052   PetscFunctionBegin;
1053   ierr = PetscFree(p);CHKERRQ(ierr);
1054   PetscFunctionReturn(0);
1055 }
1056 
1057 #undef __FUNCT__
1058 #define __FUNCT__ "PetscPartitionerView_Chaco_Ascii"
1059 PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1060 {
1061   PetscViewerFormat format;
1062   PetscErrorCode    ierr;
1063 
1064   PetscFunctionBegin;
1065   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1066   ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
1067   PetscFunctionReturn(0);
1068 }
1069 
1070 #undef __FUNCT__
1071 #define __FUNCT__ "PetscPartitionerView_Chaco"
1072 PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1073 {
1074   PetscBool      iascii;
1075   PetscErrorCode ierr;
1076 
1077   PetscFunctionBegin;
1078   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1079   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1080   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1081   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1082   PetscFunctionReturn(0);
1083 }
1084 
1085 #if defined(PETSC_HAVE_CHACO)
1086 #if defined(PETSC_HAVE_UNISTD_H)
1087 #include <unistd.h>
1088 #endif
1089 /* Chaco does not have an include file */
1090 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1091                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1092                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1093                        int mesh_dims[3], double *goal, int global_method, int local_method,
1094                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1095 
1096 extern int FREE_GRAPH;
1097 #endif
1098 
1099 #undef __FUNCT__
1100 #define __FUNCT__ "PetscPartitionerPartition_Chaco"
/* Partition the local CSR graph (start/adjacency) into nparts pieces using
   Chaco's interface() routine.  Output: partSection holds the number of
   vertices per partition, *partition lists the vertex numbers grouped by
   partition.  Chaco prints to stdout, so stdout is temporarily redirected
   through a pipe to capture its log for error reporting. */
PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
  short int     *assignment;              /* Output partition */
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  /* No local vertices: produce an empty section and an empty partition IS */
  if (!numVertices) {
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Shift adjacency entries up by one for Chaco (shift is undone at the end) */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  /* NOTE(review): the (int*) casts assume PetscInt is plain int (i.e. no
     64-bit indices); the return value of interface() is only checked below
     when PETSC_HAVE_UNISTD_H is defined -- confirm both are intended */
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    /* Drain Chaco's output from the pipe, then restore the original stdout */
    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#endif
  /* Convert to PetscSection+IS: sizes per partition, then vertex numbers grouped by partition */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the one-based shift applied to the caller's adjacency array above */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1204 
1205 #undef __FUNCT__
1206 #define __FUNCT__ "PetscPartitionerInitialize_Chaco"
1207 PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1208 {
1209   PetscFunctionBegin;
1210   part->ops->view      = PetscPartitionerView_Chaco;
1211   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1212   part->ops->partition = PetscPartitionerPartition_Chaco;
1213   PetscFunctionReturn(0);
1214 }
1215 
1216 /*MC
1217   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1218 
1219   Level: intermediate
1220 
1221 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1222 M*/
1223 
1224 #undef __FUNCT__
1225 #define __FUNCT__ "PetscPartitionerCreate_Chaco"
1226 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1227 {
1228   PetscPartitioner_Chaco *p;
1229   PetscErrorCode          ierr;
1230 
1231   PetscFunctionBegin;
1232   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1233   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1234   part->data = p;
1235 
1236   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1237   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1238   PetscFunctionReturn(0);
1239 }
1240 
1241 #undef __FUNCT__
1242 #define __FUNCT__ "PetscPartitionerDestroy_ParMetis"
1243 PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1244 {
1245   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1246   PetscErrorCode             ierr;
1247 
1248   PetscFunctionBegin;
1249   ierr = PetscFree(p);CHKERRQ(ierr);
1250   PetscFunctionReturn(0);
1251 }
1252 
1253 #undef __FUNCT__
1254 #define __FUNCT__ "PetscPartitionerView_ParMetis_Ascii"
1255 PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1256 {
1257   PetscViewerFormat format;
1258   PetscErrorCode    ierr;
1259 
1260   PetscFunctionBegin;
1261   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1262   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
1263   PetscFunctionReturn(0);
1264 }
1265 
1266 #undef __FUNCT__
1267 #define __FUNCT__ "PetscPartitionerView_ParMetis"
1268 PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1269 {
1270   PetscBool      iascii;
1271   PetscErrorCode ierr;
1272 
1273   PetscFunctionBegin;
1274   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1275   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1276   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1277   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1278   PetscFunctionReturn(0);
1279 }
1280 
1281 #if defined(PETSC_HAVE_PARMETIS)
1282 #include <parmetis.h>
1283 #endif
1284 
1285 #undef __FUNCT__
1286 #define __FUNCT__ "PetscPartitionerPartition_ParMetis"
1287 PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1288 {
1289 #if defined(PETSC_HAVE_PARMETIS)
1290   MPI_Comm       comm;
1291   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1292   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1293   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1294   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1295   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1296   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1297   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1298   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1299   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1300   PetscReal     *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1301   PetscReal     *ubvec;                    /* The balance intolerance for vertex weights */
1302   PetscInt       options[5];               /* Options */
1303   /* Outputs */
1304   PetscInt       edgeCut;                  /* The number of edges cut by the partition */
1305   PetscInt      *assignment, *points;
1306   PetscMPIInt    rank, p, v, i;
1307   PetscErrorCode ierr;
1308 
1309   PetscFunctionBegin;
1310   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1311   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1312   options[0] = 0; /* Use all defaults */
1313   /* Calculate vertex distribution */
1314   ierr = PetscMalloc4(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment);CHKERRQ(ierr);
1315   vtxdist[0] = 0;
1316   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1317   for (p = 2; p <= nparts; ++p) {
1318     vtxdist[p] += vtxdist[p-1];
1319   }
1320   /* Calculate weights */
1321   for (p = 0; p < nparts; ++p) {
1322     tpwgts[p] = 1.0/nparts;
1323   }
1324   ubvec[0] = 1.05;
1325 
1326   if (nparts == 1) {
1327     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));
1328   } else {
1329     if (vtxdist[1] == vtxdist[nparts]) {
1330       if (!rank) {
1331         PetscStackPush("METIS_PartGraphKway");
1332         ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
1333         PetscStackPop;
1334         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1335       }
1336     } else {
1337       PetscStackPush("ParMETIS_V3_PartKway");
1338       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1339       PetscStackPop;
1340       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
1341     }
1342   }
1343   /* Convert to PetscSection+IS */
1344   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1345   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1346   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1347   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1348   for (p = 0, i = 0; p < nparts; ++p) {
1349     for (v = 0; v < nvtxs; ++v) {
1350       if (assignment[v] == p) points[i++] = v;
1351     }
1352   }
1353   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1354   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1355   ierr = PetscFree4(vtxdist,tpwgts,ubvec,assignment);CHKERRQ(ierr);
1356   PetscFunctionReturn(0);
1357 #else
1358   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1359 #endif
1360 }
1361 
1362 #undef __FUNCT__
1363 #define __FUNCT__ "PetscPartitionerInitialize_ParMetis"
1364 PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1365 {
1366   PetscFunctionBegin;
1367   part->ops->view      = PetscPartitionerView_ParMetis;
1368   part->ops->destroy   = PetscPartitionerDestroy_ParMetis;
1369   part->ops->partition = PetscPartitionerPartition_ParMetis;
1370   PetscFunctionReturn(0);
1371 }
1372 
1373 /*MC
1374   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1375 
1376   Level: intermediate
1377 
1378 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1379 M*/
1380 
1381 #undef __FUNCT__
1382 #define __FUNCT__ "PetscPartitionerCreate_ParMetis"
1383 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1384 {
1385   PetscPartitioner_ParMetis *p;
1386   PetscErrorCode          ierr;
1387 
1388   PetscFunctionBegin;
1389   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1390   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1391   part->data = p;
1392 
1393   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1394   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1395   PetscFunctionReturn(0);
1396 }
1397 
1398 #undef __FUNCT__
1399 #define __FUNCT__ "DMPlexGetPartitioner"
/*@
  DMPlexGetPartitioner - Get the mesh partitioner

  Not collective

  Input Parameter:
. dm - The DM

  Output Parameter:
. part - The PetscPartitioner

  Level: developer

  Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.

.seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
@*/
PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(part, 2);
  /* Borrowed reference: the reference count is deliberately not incremented */
  *part = mesh->partitioner;
  PetscFunctionReturn(0);
}
1427 
1428 #undef __FUNCT__
1429 #define __FUNCT__ "DMPlexSetPartitioner"
/*@
  DMPlexSetPartitioner - Set the mesh partitioner

  logically collective on dm and part

  Input Parameters:
+ dm - The DM
- part - The partitioner

  Level: developer

  Note: Any existing PetscPartitioner will be destroyed.

.seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
@*/
PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
{
  DM_Plex       *mesh = (DM_Plex *) dm->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
  /* Take a reference BEFORE destroying the old partitioner, so the call is
     safe even when part is the partitioner already stored in the mesh */
  ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
  ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
  mesh->partitioner = part;
  PetscFunctionReturn(0);
}
1458 
1459 #undef __FUNCT__
1460 #define __FUNCT__ "DMPlexPartitionLabelClosure_Tree"
/* Mark, with value 'rank' in 'label', the mesh points related to 'point'
   through the point tree (mesh hierarchy for non-conforming meshes):
   - if 'up', label the transitive closure of the tree parent and recurse
     upward through further ancestors;
   - if 'down', label all tree children and recurse downward through further
     descendants. */
static PetscErrorCode DMPlexPartitionLabelClosure_Tree(DM dm, DMLabel label, PetscInt rank, PetscInt point, PetscBool up, PetscBool down)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (up) {
    PetscInt parent;

    ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
    /* A point with no parent reports itself; only recurse on a real parent */
    if (parent != point) {
      PetscInt closureSize, *closure = NULL, i;

      ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
      for (i = 0; i < closureSize; i++) {
        PetscInt cpoint = closure[2*i];  /* closure stores (point, orientation) pairs */

        ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
        /* Continue upward only: going down again here would revisit this subtree */
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
      }
      ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
    }
  }
  if (down) {
    PetscInt numChildren;
    const PetscInt *children;

    ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
    if (numChildren) {
      PetscInt i;

      for (i = 0; i < numChildren; i++) {
        PetscInt cpoint = children[i];

        ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
        /* Continue downward only, mirroring the upward-only recursion above */
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}
1501 
1502 #undef __FUNCT__
1503 #define __FUNCT__ "DMPlexPartitionLabelClosure"
/*@
  DMPlexPartitionLabelClosure - Add the closure of all points to the partition label

  Input Parameters:
+ dm     - The DM
- label  - DMLabel assigning ranks to remote roots

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, c, closureSize;
  PetscInt       *closure = NULL;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* For every rank value present in the label, label the transitive closure
     (and tree relatives) of each point currently assigned to that rank */
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];

    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      /* closure stores (point, orientation) pairs, hence the stride of 2 */
      for (c = 0; c < closureSize*2; c += 2) {
        ierr = DMLabelSetValue(label, closure[c], rank);CHKERRQ(ierr);
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,closure[c],PETSC_TRUE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  /* Return the reused closure work array; NOTE(review): this relies on the
     point argument (0 here) being ignored when only freeing the array -- confirm */
  if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);}
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1548 
1549 #undef __FUNCT__
1550 #define __FUNCT__ "DMPlexPartitionLabelAdjacency"
/*@
  DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label

  Input Parameters:
+ dm     - The DM
- label  - DMLabel assigning ranks to remote roots

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, a, adjSize;
  PetscInt       *adj = NULL;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* For every rank value present in the label, label every point adjacent to
     a point currently assigned to that rank */
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];

    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* PETSC_DETERMINE asks DMPlexGetAdjacency to (re)size the adj array */
      adjSize = PETSC_DETERMINE;
      ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
      for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  /* The adjacency work array is allocated by DMPlexGetAdjacency and freed here */
  ierr = PetscFree(adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1593 
1594 #undef __FUNCT__
1595 #define __FUNCT__ "DMPlexPartitionLabelPropagate"
1596 /*@
1597   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
1598 
1599   Input Parameters:
1600 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1602 
1603   Level: developer
1604 
1605   Note: This is required when generating multi-level overlaps to capture
1606   overlap points from non-neighbouring partitions.
1607 
1608 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1609 @*/
1610 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
1611 {
1612   MPI_Comm        comm;
1613   PetscMPIInt     rank;
1614   PetscSF         sfPoint;
1615   DMLabel         lblRoots, lblLeaves;
1616   IS              rankIS, pointIS;
1617   const PetscInt *ranks;
1618   PetscInt        numRanks, r;
1619   PetscErrorCode  ierr;
1620 
1621   PetscFunctionBegin;
1622   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1623   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1624   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1625   /* Pull point contributions from remote leaves into local roots */
1626   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
1627   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
1628   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1629   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1630   for (r = 0; r < numRanks; ++r) {
1631     const PetscInt remoteRank = ranks[r];
1632     if (remoteRank == rank) continue;
1633     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
1634     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1635     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1636   }
1637   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1638   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1639   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
1640   /* Push point contributions from roots into remote leaves */
1641   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
1642   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
1643   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1644   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1645   for (r = 0; r < numRanks; ++r) {
1646     const PetscInt remoteRank = ranks[r];
1647     if (remoteRank == rank) continue;
1648     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
1649     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1650     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1651   }
1652   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1653   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1654   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
1655   PetscFunctionReturn(0);
1656 }
1657 
1658 #undef __FUNCT__
1659 #define __FUNCT__ "DMPlexPartitionLabelInvert"
1660 /*@
1661   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
1662 
1663   Input Parameters:
+ dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots
1670 
1671   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
1672   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
1673 
1674   Level: developer
1675 
1676 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1677 @*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, numProcs;
  PetscInt           p, n, numNeighbors, size, l, nleaves;
  PetscSF            sfPoint;
  PetscSFNode       *rootPoints, *leafPoints;
  PetscSection       rootSection, leafSection;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners */
  /* Build a send section over the communicator: one chart entry per process,
     whose dof count is the number of points rootLabel assigns to that process */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, numProcs);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(size, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  /* Fill the send buffer: for each labeled point, record its true owner.
     A point that appears as a leaf in the point SF is replaced by its remote
     (root) entry; otherwise it is locally owned and recorded as (point, rank). */
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* binary search requires the SF leaf list `local`; NULL means contiguous leaves */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap */
  /* Exchange the (point, rank) pairs over processSF; leafSection/leafPoints are allocated by the call */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Filter remote contributions (ovLeafPoints) into the overlapSF */
  /* Record every received pair in the output label: point -> owning rank */
  ierr = PetscSectionGetStorageSize(leafSection, &size);CHKERRQ(ierr);
  for (p = 0; p < size; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1747 
1748 #undef __FUNCT__
1749 #define __FUNCT__ "DMPlexPartitionLabelCreateSF"
1750 /*@
1751   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
1752 
1753   Input Parameters:
+ dm    - The DM
- label - DMLabel assigning ranks to remote roots

  Output Parameter:
. sf    - The star forest communication context encapsulating the defined mapping
1759 
1760   Note: The incoming label is a receiver mapping of remote points to their parent rank.
1761 
1762   Level: developer
1763 
1764 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
1765 @*/
1766 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
1767 {
1768   PetscMPIInt     rank, numProcs;
1769   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
1770   PetscSFNode    *remotePoints;
1771   IS              remoteRootIS;
1772   const PetscInt *remoteRoots;
1773   PetscErrorCode ierr;
1774 
1775   PetscFunctionBegin;
1776   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1777   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
1778 
1779   for (numRemote = 0, n = 0; n < numProcs; ++n) {
1780     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1781     numRemote += numPoints;
1782   }
1783   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
1784   /* Put owned points first */
1785   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
1786   if (numPoints > 0) {
1787     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
1788     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1789     for (p = 0; p < numPoints; p++) {
1790       remotePoints[idx].index = remoteRoots[p];
1791       remotePoints[idx].rank = rank;
1792       idx++;
1793     }
1794     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1795     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1796   }
1797   /* Now add remote points */
1798   for (n = 0; n < numProcs; ++n) {
1799     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1800     if (numPoints <= 0 || n == rank) continue;
1801     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
1802     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1803     for (p = 0; p < numPoints; p++) {
1804       remotePoints[idx].index = remoteRoots[p];
1805       remotePoints[idx].rank = n;
1806       idx++;
1807     }
1808     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1809     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1810   }
1811   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
1812   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1813   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1814   PetscFunctionReturn(0);
1815 }
1816