xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 1ceb14c030f320ad962f864c6f8de98a26bbbaf7)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
/* PETSc class id identifying PetscPartitioner objects; assigned when the class is registered */
PetscClassId PETSCPARTITIONER_CLASSID = 0;

/* Registry mapping PetscPartitioner type names to their creation routines */
PetscFunctionList PetscPartitionerList              = NULL;
/* Guard flag so the built-in partitioner types are registered only once */
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
7 
8 PetscBool ChacoPartitionercite = PETSC_FALSE;
9 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
10                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
11                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
12                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
13                                "  isbn      = {0-89791-816-9},\n"
14                                "  pages     = {28},\n"
15                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
16                                "  publisher = {ACM Press},\n"
17                                "  address   = {New York},\n"
18                                "  year      = {1995}\n}\n";
19 
/* Citation for the ParMetis partitioner; the flag tracks whether it was already registered
   (presumably via PetscCitationsRegister() at the call sites — confirm there) */
PetscBool ParMetisPartitionercite = PETSC_FALSE;
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";
28 
29 #undef __FUNCT__
30 #define __FUNCT__ "DMPlexCreatePartitionerGraph"
31 /*@C
32   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
33 
34   Input Parameters:
35 + dm      - The mesh DM dm
36 - height  - Height of the strata from which to construct the graph
37 
38   Output Parameter:
39 + numVertices - Number of vertices in the graph
40 - offsets     - Point offsets in the graph
41 - adjacency   - Point connectivity in the graph
42 
43   The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
44   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
45   representation on the mesh.
46 
47   Level: developer
48 
49 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
50 @*/
51 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
52 {
53   PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
54   PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
55   IS             cellNumbering;
56   const PetscInt *cellNum;
57   PetscBool      useCone, useClosure;
58   PetscSection   section;
59   PetscSegBuffer adjBuffer;
60   PetscSF        sfPoint;
61   PetscMPIInt    rank;
62   PetscErrorCode ierr;
63 
64   PetscFunctionBegin;
65   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
66   ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
67   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
68   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
69   /* Build adjacency graph via a section/segbuffer */
70   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
71   ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
72   ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
73   /* Always use FVM adjacency to create partitioner graph */
74   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
75   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
76   ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
77   ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
78   if (nroots > 0) {
79     ierr = DMPlexGetCellNumbering(dm, &cellNumbering);CHKERRQ(ierr);
80     ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
81   }
82   for (*numVertices = 0, p = pStart; p < pEnd; p++) {
83     /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
84     if (nroots > 0) {if (cellNum[p] < 0) continue;}
85     adjSize = PETSC_DETERMINE;
86     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
87     for (a = 0; a < adjSize; ++a) {
88       const PetscInt point = adj[a];
89       if (point != p && pStart <= point && point < pEnd) {
90         PetscInt *PETSC_RESTRICT pBuf;
91         ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
92         ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
93         *pBuf = point;
94       }
95     }
96     (*numVertices)++;
97   }
98   ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
99   ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
100   /* Derive CSR graph from section/segbuffer */
101   ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
102   ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
103   ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
104   for (idx = 0, p = pStart; p < pEnd; p++) {
105     if (nroots > 0) {if (cellNum[p] < 0) continue;}
106     ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
107   }
108   vOffsets[*numVertices] = size;
109   if (offsets) *offsets = vOffsets;
110   ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
111   if (nroots > 0) {
112     ISLocalToGlobalMapping ltogCells;
113     PetscInt n, size, *cells_arr;
114     /* In parallel, apply a global cell numbering to the graph */
115     ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
116     ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
117     ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
118     ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
119     /* Convert to positive global cell numbers */
120     for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
121     ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
122     ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);CHKERRQ(ierr);
123     ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
124   }
125   if (adjacency) *adjacency = graph;
126   /* Clean up */
127   ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
128   ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
129   ierr = PetscFree(adj);CHKERRQ(ierr);
130   PetscFunctionReturn(0);
131 }
132 
133 #undef __FUNCT__
134 #define __FUNCT__ "DMPlexCreateNeighborCSR"
135 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
136 {
137   const PetscInt maxFaceCases = 30;
138   PetscInt       numFaceCases = 0;
139   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
140   PetscInt      *off, *adj;
141   PetscInt      *neighborCells = NULL;
142   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
143   PetscErrorCode ierr;
144 
145   PetscFunctionBegin;
146   /* For parallel partitioning, I think you have to communicate supports */
147   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
148   cellDim = dim - cellHeight;
149   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
150   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
151   if (cEnd - cStart == 0) {
152     if (numVertices) *numVertices = 0;
153     if (offsets)   *offsets   = NULL;
154     if (adjacency) *adjacency = NULL;
155     PetscFunctionReturn(0);
156   }
157   numCells  = cEnd - cStart;
158   faceDepth = depth - cellHeight;
159   if (dim == depth) {
160     PetscInt f, fStart, fEnd;
161 
162     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
163     /* Count neighboring cells */
164     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
165     for (f = fStart; f < fEnd; ++f) {
166       const PetscInt *support;
167       PetscInt        supportSize;
168       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
169       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
170       if (supportSize == 2) {
171         PetscInt numChildren;
172 
173         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
174         if (!numChildren) {
175           ++off[support[0]-cStart+1];
176           ++off[support[1]-cStart+1];
177         }
178       }
179     }
180     /* Prefix sum */
181     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
182     if (adjacency) {
183       PetscInt *tmp;
184 
185       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
186       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
187       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
188       /* Get neighboring cells */
189       for (f = fStart; f < fEnd; ++f) {
190         const PetscInt *support;
191         PetscInt        supportSize;
192         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
193         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
194         if (supportSize == 2) {
195           PetscInt numChildren;
196 
197           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
198           if (!numChildren) {
199             adj[tmp[support[0]-cStart]++] = support[1];
200             adj[tmp[support[1]-cStart]++] = support[0];
201           }
202         }
203       }
204 #if defined(PETSC_USE_DEBUG)
205       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
206 #endif
207       ierr = PetscFree(tmp);CHKERRQ(ierr);
208     }
209     if (numVertices) *numVertices = numCells;
210     if (offsets)   *offsets   = off;
211     if (adjacency) *adjacency = adj;
212     PetscFunctionReturn(0);
213   }
214   /* Setup face recognition */
215   if (faceDepth == 1) {
216     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
217 
218     for (c = cStart; c < cEnd; ++c) {
219       PetscInt corners;
220 
221       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
222       if (!cornersSeen[corners]) {
223         PetscInt nFV;
224 
225         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
226         cornersSeen[corners] = 1;
227 
228         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
229 
230         numFaceVertices[numFaceCases++] = nFV;
231       }
232     }
233   }
234   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
235   /* Count neighboring cells */
236   for (cell = cStart; cell < cEnd; ++cell) {
237     PetscInt numNeighbors = PETSC_DETERMINE, n;
238 
239     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
240     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
241     for (n = 0; n < numNeighbors; ++n) {
242       PetscInt        cellPair[2];
243       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
244       PetscInt        meetSize = 0;
245       const PetscInt *meet    = NULL;
246 
247       cellPair[0] = cell; cellPair[1] = neighborCells[n];
248       if (cellPair[0] == cellPair[1]) continue;
249       if (!found) {
250         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
251         if (meetSize) {
252           PetscInt f;
253 
254           for (f = 0; f < numFaceCases; ++f) {
255             if (numFaceVertices[f] == meetSize) {
256               found = PETSC_TRUE;
257               break;
258             }
259           }
260         }
261         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
262       }
263       if (found) ++off[cell-cStart+1];
264     }
265   }
266   /* Prefix sum */
267   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
268 
269   if (adjacency) {
270     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
271     /* Get neighboring cells */
272     for (cell = cStart; cell < cEnd; ++cell) {
273       PetscInt numNeighbors = PETSC_DETERMINE, n;
274       PetscInt cellOffset   = 0;
275 
276       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
277       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
278       for (n = 0; n < numNeighbors; ++n) {
279         PetscInt        cellPair[2];
280         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
281         PetscInt        meetSize = 0;
282         const PetscInt *meet    = NULL;
283 
284         cellPair[0] = cell; cellPair[1] = neighborCells[n];
285         if (cellPair[0] == cellPair[1]) continue;
286         if (!found) {
287           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
288           if (meetSize) {
289             PetscInt f;
290 
291             for (f = 0; f < numFaceCases; ++f) {
292               if (numFaceVertices[f] == meetSize) {
293                 found = PETSC_TRUE;
294                 break;
295               }
296             }
297           }
298           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
299         }
300         if (found) {
301           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
302           ++cellOffset;
303         }
304       }
305     }
306   }
307   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
308   if (numVertices) *numVertices = numCells;
309   if (offsets)   *offsets   = off;
310   if (adjacency) *adjacency = adj;
311   PetscFunctionReturn(0);
312 }
313 
314 #undef __FUNCT__
315 #define __FUNCT__ "PetscPartitionerRegister"
316 /*@C
317   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
318 
319   Not Collective
320 
321   Input Parameters:
322 + name        - The name of a new user-defined creation routine
323 - create_func - The creation routine itself
324 
325   Notes:
326   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
327 
328   Sample usage:
329 .vb
330     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
331 .ve
332 
333   Then, your PetscPartitioner type can be chosen with the procedural interface via
334 .vb
335     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
336     PetscPartitionerSetType(PetscPartitioner, "my_part");
337 .ve
338    or at runtime via the option
339 .vb
340     -petscpartitioner_type my_part
341 .ve
342 
343   Level: advanced
344 
345 .keywords: PetscPartitioner, register
346 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
347 
348 @*/
349 PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
350 {
351   PetscErrorCode ierr;
352 
353   PetscFunctionBegin;
354   ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
355   PetscFunctionReturn(0);
356 }
357 
358 #undef __FUNCT__
359 #define __FUNCT__ "PetscPartitionerSetType"
360 /*@C
361   PetscPartitionerSetType - Builds a particular PetscPartitioner
362 
363   Collective on PetscPartitioner
364 
365   Input Parameters:
366 + part - The PetscPartitioner object
367 - name - The kind of partitioner
368 
369   Options Database Key:
370 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
371 
372   Level: intermediate
373 
374 .keywords: PetscPartitioner, set, type
375 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
376 @*/
377 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
378 {
379   PetscErrorCode (*r)(PetscPartitioner);
380   PetscBool      match;
381   PetscErrorCode ierr;
382 
383   PetscFunctionBegin;
384   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
385   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
386   if (match) PetscFunctionReturn(0);
387 
388   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
389   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
390   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
391 
392   if (part->ops->destroy) {
393     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
394     part->ops->destroy = NULL;
395   }
396   ierr = (*r)(part);CHKERRQ(ierr);
397   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
398   PetscFunctionReturn(0);
399 }
400 
401 #undef __FUNCT__
402 #define __FUNCT__ "PetscPartitionerGetType"
403 /*@C
404   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
405 
406   Not Collective
407 
408   Input Parameter:
409 . part - The PetscPartitioner
410 
411   Output Parameter:
412 . name - The PetscPartitioner type name
413 
414   Level: intermediate
415 
416 .keywords: PetscPartitioner, get, type, name
417 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
418 @*/
419 PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
420 {
421   PetscErrorCode ierr;
422 
423   PetscFunctionBegin;
424   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
425   PetscValidPointer(name, 2);
426   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
427   *name = ((PetscObject) part)->type_name;
428   PetscFunctionReturn(0);
429 }
430 
431 #undef __FUNCT__
432 #define __FUNCT__ "PetscPartitionerView"
433 /*@C
434   PetscPartitionerView - Views a PetscPartitioner
435 
436   Collective on PetscPartitioner
437 
438   Input Parameter:
439 + part - the PetscPartitioner object to view
440 - v    - the viewer
441 
442   Level: developer
443 
444 .seealso: PetscPartitionerDestroy()
445 @*/
446 PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
447 {
448   PetscErrorCode ierr;
449 
450   PetscFunctionBegin;
451   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
452   if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
453   if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
454   PetscFunctionReturn(0);
455 }
456 
457 #undef __FUNCT__
458 #define __FUNCT__ "PetscPartitionerSetTypeFromOptions_Internal"
459 PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
460 {
461   const char    *defaultType;
462   char           name[256];
463   PetscBool      flg;
464   PetscErrorCode ierr;
465 
466   PetscFunctionBegin;
467   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
468   if (!((PetscObject) part)->type_name) defaultType = PETSCPARTITIONERCHACO;
469   else                                  defaultType = ((PetscObject) part)->type_name;
470   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
471 
472   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
473   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);CHKERRQ(ierr);
474   if (flg) {
475     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
476   } else if (!((PetscObject) part)->type_name) {
477     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
478   }
479   ierr = PetscOptionsEnd();CHKERRQ(ierr);
480   PetscFunctionReturn(0);
481 }
482 
483 #undef __FUNCT__
484 #define __FUNCT__ "PetscPartitionerSetFromOptions"
485 /*@
486   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
487 
488   Collective on PetscPartitioner
489 
490   Input Parameter:
491 . part - the PetscPartitioner object to set options for
492 
493   Level: developer
494 
495 .seealso: PetscPartitionerView()
496 @*/
497 PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
498 {
499   PetscErrorCode ierr;
500 
501   PetscFunctionBegin;
502   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
503   ierr = PetscPartitionerSetTypeFromOptions_Internal(part);CHKERRQ(ierr);
504 
505   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
506   if (part->ops->setfromoptions) {ierr = (*part->ops->setfromoptions)(part);CHKERRQ(ierr);}
507   /* process any options handlers added with PetscObjectAddOptionsHandler() */
508   ierr = PetscObjectProcessOptionsHandlers((PetscObject) part);CHKERRQ(ierr);
509   ierr = PetscOptionsEnd();CHKERRQ(ierr);
510   ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
511   PetscFunctionReturn(0);
512 }
513 
514 #undef __FUNCT__
515 #define __FUNCT__ "PetscPartitionerSetUp"
516 /*@C
517   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
518 
519   Collective on PetscPartitioner
520 
521   Input Parameter:
522 . part - the PetscPartitioner object to setup
523 
524   Level: developer
525 
526 .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
527 @*/
528 PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
529 {
530   PetscErrorCode ierr;
531 
532   PetscFunctionBegin;
533   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
534   if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
535   PetscFunctionReturn(0);
536 }
537 
538 #undef __FUNCT__
539 #define __FUNCT__ "PetscPartitionerDestroy"
540 /*@
541   PetscPartitionerDestroy - Destroys a PetscPartitioner object
542 
543   Collective on PetscPartitioner
544 
545   Input Parameter:
546 . part - the PetscPartitioner object to destroy
547 
548   Level: developer
549 
550 .seealso: PetscPartitionerView()
551 @*/
552 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
553 {
554   PetscErrorCode ierr;
555 
556   PetscFunctionBegin;
557   if (!*part) PetscFunctionReturn(0);
558   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
559 
560   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
561   ((PetscObject) (*part))->refct = 0;
562 
563   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
564   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
565   PetscFunctionReturn(0);
566 }
567 
568 #undef __FUNCT__
569 #define __FUNCT__ "PetscPartitionerCreate"
570 /*@
571   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
572 
573   Collective on MPI_Comm
574 
575   Input Parameter:
576 . comm - The communicator for the PetscPartitioner object
577 
578   Output Parameter:
579 . part - The PetscPartitioner object
580 
581   Level: beginner
582 
583 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE
584 @*/
585 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
586 {
587   PetscPartitioner p;
588   PetscErrorCode   ierr;
589 
590   PetscFunctionBegin;
591   PetscValidPointer(part, 2);
592   *part = NULL;
593   ierr = PetscFVInitializePackage();CHKERRQ(ierr);
594 
595   ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
596 
597   *part = p;
598   PetscFunctionReturn(0);
599 }
600 
601 #undef __FUNCT__
602 #define __FUNCT__ "PetscPartitionerPartition"
603 /*@
604   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
605 
606   Collective on DM
607 
608   Input Parameters:
609 + part    - The PetscPartitioner
610 - dm      - The mesh DM
611 
612   Output Parameters:
613 + partSection     - The PetscSection giving the division of points by partition
614 - partition       - The list of points by partition
615 
616   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
617 
618   Level: developer
619 
620 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
621 @*/
622 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
623 {
624   PetscMPIInt    size;
625   PetscErrorCode ierr;
626 
627   PetscFunctionBegin;
628   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
629   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
630   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
631   PetscValidPointer(partition, 5);
632   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
633   if (size == 1) {
634     PetscInt *points;
635     PetscInt  cStart, cEnd, c;
636 
637     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
638     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
639     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
640     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
641     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
642     for (c = cStart; c < cEnd; ++c) points[c] = c;
643     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
644     PetscFunctionReturn(0);
645   }
646   if (part->height == 0) {
647     PetscInt  numVertices;
648     PetscInt *start     = NULL;
649     PetscInt *adjacency = NULL;
650 
651     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency);CHKERRQ(ierr);
652     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
653     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
654     ierr = PetscFree(start);CHKERRQ(ierr);
655     ierr = PetscFree(adjacency);CHKERRQ(ierr);
656   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
657   PetscFunctionReturn(0);
658 }
659 
660 #undef __FUNCT__
661 #define __FUNCT__ "PetscPartitionerDestroy_Shell"
662 PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
663 {
664   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
665   PetscErrorCode          ierr;
666 
667   PetscFunctionBegin;
668   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
669   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
670   ierr = PetscFree(p);CHKERRQ(ierr);
671   PetscFunctionReturn(0);
672 }
673 
674 #undef __FUNCT__
675 #define __FUNCT__ "PetscPartitionerView_Shell_Ascii"
676 PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
677 {
678   PetscViewerFormat format;
679   PetscErrorCode    ierr;
680 
681   PetscFunctionBegin;
682   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
683   ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
684   PetscFunctionReturn(0);
685 }
686 
687 #undef __FUNCT__
688 #define __FUNCT__ "PetscPartitionerView_Shell"
689 PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
690 {
691   PetscBool      iascii;
692   PetscErrorCode ierr;
693 
694   PetscFunctionBegin;
695   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
696   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
697   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
698   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
699   PetscFunctionReturn(0);
700 }
701 
702 #undef __FUNCT__
703 #define __FUNCT__ "PetscPartitionerPartition_Shell"
704 PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
705 {
706   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
707   PetscInt                np;
708   PetscErrorCode          ierr;
709 
710   PetscFunctionBegin;
711   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
712   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
713   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
714   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
715   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
716   *partition = p->partition;
717   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
718   PetscFunctionReturn(0);
719 }
720 
721 #undef __FUNCT__
722 #define __FUNCT__ "PetscPartitionerInitialize_Shell"
723 PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
724 {
725   PetscFunctionBegin;
726   part->ops->view      = PetscPartitionerView_Shell;
727   part->ops->destroy   = PetscPartitionerDestroy_Shell;
728   part->ops->partition = PetscPartitionerPartition_Shell;
729   PetscFunctionReturn(0);
730 }
731 
732 /*MC
  PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner implementation that applies a user-provided partition, set with PetscPartitionerShellSetPartition()
734 
735   Level: intermediate
736 
737 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
738 M*/
739 
740 #undef __FUNCT__
741 #define __FUNCT__ "PetscPartitionerCreate_Shell"
742 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
743 {
744   PetscPartitioner_Shell *p;
745   PetscErrorCode          ierr;
746 
747   PetscFunctionBegin;
748   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
749   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
750   part->data = p;
751 
752   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
753   PetscFunctionReturn(0);
754 }
755 
756 #undef __FUNCT__
757 #define __FUNCT__ "PetscPartitionerShellSetPartition"
758 /*@C
759   PetscPartitionerShellSetPartition - Set an artifical partition for a mesh
760 
761   Collective on PART
762 
763   Input Parameters:
764 + part     - The PetscPartitioner
765 . numProcs - The number of partitions
766 . sizes    - array of size numProcs (or NULL) providing the number of points in each partition
767 - points   - array of size sum(sizes) (may be NULL iff sizes is NULL) providing the partition each point belongs to
768 
769   Level: developer
770 
771   Notes:
772 
773     It is safe to free the sizes and points arrays after use in this routine.
774 
775 .seealso DMPlexDistribute(), PetscPartitionerCreate()
776 @*/
777 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt numProcs, const PetscInt sizes[], const PetscInt points[])
778 {
779   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
780   PetscInt                proc, numPoints;
781   PetscErrorCode          ierr;
782 
783   PetscFunctionBegin;
784   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
785   if (sizes) {PetscValidPointer(sizes, 3);}
786   if (sizes) {PetscValidPointer(points, 4);}
787   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
788   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
789   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
790   ierr = PetscSectionSetChart(p->section, 0, numProcs);CHKERRQ(ierr);
791   if (sizes) {
792     for (proc = 0; proc < numProcs; ++proc) {
793       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
794     }
795   }
796   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
797   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
798   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
799   PetscFunctionReturn(0);
800 }
801 
802 #undef __FUNCT__
803 #define __FUNCT__ "PetscPartitionerDestroy_Simple"
804 PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
805 {
806   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
807   PetscErrorCode          ierr;
808 
809   PetscFunctionBegin;
810   ierr = PetscFree(p);CHKERRQ(ierr);
811   PetscFunctionReturn(0);
812 }
813 
814 #undef __FUNCT__
815 #define __FUNCT__ "PetscPartitionerView_Simple_Ascii"
816 PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
817 {
818   PetscViewerFormat format;
819   PetscErrorCode    ierr;
820 
821   PetscFunctionBegin;
822   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
823   ierr = PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");CHKERRQ(ierr);
824   PetscFunctionReturn(0);
825 }
826 
827 #undef __FUNCT__
828 #define __FUNCT__ "PetscPartitionerView_Simple"
829 PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
830 {
831   PetscBool      iascii;
832   PetscErrorCode ierr;
833 
834   PetscFunctionBegin;
835   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
836   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
837   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
838   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
839   PetscFunctionReturn(0);
840 }
841 
842 #undef __FUNCT__
843 #define __FUNCT__ "PetscPartitionerPartition_Simple"
844 PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
845 {
846   MPI_Comm       comm;
847   PetscInt       np;
848   PetscMPIInt    size;
849   PetscErrorCode ierr;
850 
851   PetscFunctionBegin;
852   comm = PetscObjectComm((PetscObject)dm);
853   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
854   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
855   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
856   if (size == 1) {
857     for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
858   }
859   else {
860     PetscMPIInt rank;
861     PetscInt nvGlobal, *offsets, myFirst, myLast;
862 
863     ierr = PetscMalloc1(size+1,&offsets);
864     offsets[0] = 0;
865     ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
866     for (np = 2; np <= size; np++) {
867       offsets[np] += offsets[np-1];
868     }
869     nvGlobal = offsets[size];
870     ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
871     myFirst = offsets[rank];
872     myLast  = offsets[rank + 1] - 1;
873     ierr = PetscFree(offsets);CHKERRQ(ierr);
874     if (numVertices) {
875       PetscInt firstPart = 0, firstLargePart = 0;
876       PetscInt lastPart = 0, lastLargePart = 0;
877       PetscInt rem = nvGlobal % nparts;
878       PetscInt pSmall = nvGlobal/nparts;
879       PetscInt pBig = nvGlobal/nparts + 1;
880 
881 
882       if (rem) {
883         firstLargePart = myFirst / pBig;
884         lastLargePart  = myLast  / pBig;
885 
886         if (firstLargePart < rem) {
887           firstPart = firstLargePart;
888         }
889         else {
890           firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
891         }
892         if (lastLargePart < rem) {
893           lastPart = lastLargePart;
894         }
895         else {
896           lastPart = rem + (myLast - (rem * pBig)) / pSmall;
897         }
898       }
899       else {
900         firstPart = myFirst / (nvGlobal/nparts);
901         lastPart  = myLast  / (nvGlobal/nparts);
902       }
903 
904       for (np = firstPart; np <= lastPart; np++) {
905         PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
906         PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);
907 
908         PartStart = PetscMax(PartStart,myFirst);
909         PartEnd   = PetscMin(PartEnd,myLast+1);
910         ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
911       }
912     }
913   }
914   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
915   PetscFunctionReturn(0);
916 }
917 
918 #undef __FUNCT__
919 #define __FUNCT__ "PetscPartitionerInitialize_Simple"
920 PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
921 {
922   PetscFunctionBegin;
923   part->ops->view      = PetscPartitionerView_Simple;
924   part->ops->destroy   = PetscPartitionerDestroy_Simple;
925   part->ops->partition = PetscPartitionerPartition_Simple;
926   PetscFunctionReturn(0);
927 }
928 
929 /*MC
930   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
931 
932   Level: intermediate
933 
934 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
935 M*/
936 
937 #undef __FUNCT__
938 #define __FUNCT__ "PetscPartitionerCreate_Simple"
939 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
940 {
941   PetscPartitioner_Simple *p;
942   PetscErrorCode           ierr;
943 
944   PetscFunctionBegin;
945   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
946   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
947   part->data = p;
948 
949   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
950   PetscFunctionReturn(0);
951 }
952 
953 #undef __FUNCT__
954 #define __FUNCT__ "PetscPartitionerDestroy_Chaco"
955 PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
956 {
957   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
958   PetscErrorCode          ierr;
959 
960   PetscFunctionBegin;
961   ierr = PetscFree(p);CHKERRQ(ierr);
962   PetscFunctionReturn(0);
963 }
964 
965 #undef __FUNCT__
966 #define __FUNCT__ "PetscPartitionerView_Chaco_Ascii"
967 PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
968 {
969   PetscViewerFormat format;
970   PetscErrorCode    ierr;
971 
972   PetscFunctionBegin;
973   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
974   ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
975   PetscFunctionReturn(0);
976 }
977 
978 #undef __FUNCT__
979 #define __FUNCT__ "PetscPartitionerView_Chaco"
980 PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
981 {
982   PetscBool      iascii;
983   PetscErrorCode ierr;
984 
985   PetscFunctionBegin;
986   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
987   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
988   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
989   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
990   PetscFunctionReturn(0);
991 }
992 
993 #if defined(PETSC_HAVE_CHACO)
994 #if defined(PETSC_HAVE_UNISTD_H)
995 #include <unistd.h>
996 #endif
997 /* Chaco does not have an include file */
998 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
999                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1000                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1001                        int mesh_dims[3], double *goal, int global_method, int local_method,
1002                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1003 
1004 extern int FREE_GRAPH;
1005 #endif
1006 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerPartition_Chaco"
/* Partition the CSR graph (start, adjacency) into nparts pieces with the Chaco library.
   The result is returned as partSection (number of local vertices per part) plus an IS
   listing the local vertices grouped by part.  The adjacency array is shifted to 1-based
   numbering in place for Chaco and restored before returning. */
PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
  short int     *assignment;              /* Output partition */
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  if (!numVertices) {
    /* Empty local graph: still set up the section, and return an empty IS */
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Shift adjacency to 1-based numbering for Chaco; undone at the end of this function */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    /* Restore stdout and harvest whatever Chaco printed.
       NOTE(review): at most 10000-1 bytes are read after interface() returns; if Chaco
       produces more output than the pipe capacity, interface() could block -- confirm. */
    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  /* List vertices grouped by their assigned part (O(nparts*nvtxs) scan) */
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift applied above */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1112 
1113 #undef __FUNCT__
1114 #define __FUNCT__ "PetscPartitionerInitialize_Chaco"
1115 PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1116 {
1117   PetscFunctionBegin;
1118   part->ops->view      = PetscPartitionerView_Chaco;
1119   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1120   part->ops->partition = PetscPartitionerPartition_Chaco;
1121   PetscFunctionReturn(0);
1122 }
1123 
1124 /*MC
1125   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1126 
1127   Level: intermediate
1128 
1129 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1130 M*/
1131 
1132 #undef __FUNCT__
1133 #define __FUNCT__ "PetscPartitionerCreate_Chaco"
1134 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1135 {
1136   PetscPartitioner_Chaco *p;
1137   PetscErrorCode          ierr;
1138 
1139   PetscFunctionBegin;
1140   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1141   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1142   part->data = p;
1143 
1144   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1145   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1146   PetscFunctionReturn(0);
1147 }
1148 
1149 #undef __FUNCT__
1150 #define __FUNCT__ "PetscPartitionerDestroy_ParMetis"
1151 PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1152 {
1153   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1154   PetscErrorCode             ierr;
1155 
1156   PetscFunctionBegin;
1157   ierr = PetscFree(p);CHKERRQ(ierr);
1158   PetscFunctionReturn(0);
1159 }
1160 
1161 #undef __FUNCT__
1162 #define __FUNCT__ "PetscPartitionerView_ParMetis_Ascii"
1163 PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1164 {
1165   PetscViewerFormat format;
1166   PetscErrorCode    ierr;
1167 
1168   PetscFunctionBegin;
1169   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1170   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
1171   PetscFunctionReturn(0);
1172 }
1173 
1174 #undef __FUNCT__
1175 #define __FUNCT__ "PetscPartitionerView_ParMetis"
1176 PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1177 {
1178   PetscBool      iascii;
1179   PetscErrorCode ierr;
1180 
1181   PetscFunctionBegin;
1182   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1183   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1184   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1185   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1186   PetscFunctionReturn(0);
1187 }
1188 
1189 #if defined(PETSC_HAVE_PARMETIS)
1190 #include <parmetis.h>
1191 #endif
1192 
1193 #undef __FUNCT__
1194 #define __FUNCT__ "PetscPartitionerPartition_ParMetis"
1195 PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1196 {
1197 #if defined(PETSC_HAVE_PARMETIS)
1198   MPI_Comm       comm;
1199   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1200   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1201   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1202   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1203   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1204   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1205   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1206   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1207   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1208   PetscReal     *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1209   PetscReal     *ubvec;                    /* The balance intolerance for vertex weights */
1210   PetscInt       options[5];               /* Options */
1211   /* Outputs */
1212   PetscInt       edgeCut;                  /* The number of edges cut by the partition */
1213   PetscInt      *assignment, *points;
1214   PetscMPIInt    rank, p, v, i;
1215   PetscErrorCode ierr;
1216 
1217   PetscFunctionBegin;
1218   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1219   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1220   options[0] = 0; /* Use all defaults */
1221   /* Calculate vertex distribution */
1222   ierr = PetscMalloc4(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment);CHKERRQ(ierr);
1223   vtxdist[0] = 0;
1224   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1225   for (p = 2; p <= nparts; ++p) {
1226     vtxdist[p] += vtxdist[p-1];
1227   }
1228   /* Calculate weights */
1229   for (p = 0; p < nparts; ++p) {
1230     tpwgts[p] = 1.0/nparts;
1231   }
1232   ubvec[0] = 1.05;
1233 
1234   if (nparts == 1) {
1235     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));
1236   } else {
1237     if (vtxdist[1] == vtxdist[nparts]) {
1238       if (!rank) {
1239         PetscStackPush("METIS_PartGraphKway");
1240         ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
1241         PetscStackPop;
1242         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1243       }
1244     } else {
1245       PetscStackPush("ParMETIS_V3_PartKway");
1246       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1247       PetscStackPop;
1248       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
1249     }
1250   }
1251   /* Convert to PetscSection+IS */
1252   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1253   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1254   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1255   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1256   for (p = 0, i = 0; p < nparts; ++p) {
1257     for (v = 0; v < nvtxs; ++v) {
1258       if (assignment[v] == p) points[i++] = v;
1259     }
1260   }
1261   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1262   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1263   ierr = PetscFree4(vtxdist,tpwgts,ubvec,assignment);CHKERRQ(ierr);
1264   PetscFunctionReturn(0);
1265 #else
1266   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1267 #endif
1268 }
1269 
1270 #undef __FUNCT__
1271 #define __FUNCT__ "PetscPartitionerInitialize_ParMetis"
1272 PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1273 {
1274   PetscFunctionBegin;
1275   part->ops->view      = PetscPartitionerView_ParMetis;
1276   part->ops->destroy   = PetscPartitionerDestroy_ParMetis;
1277   part->ops->partition = PetscPartitionerPartition_ParMetis;
1278   PetscFunctionReturn(0);
1279 }
1280 
1281 /*MC
1282   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1283 
1284   Level: intermediate
1285 
1286 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1287 M*/
1288 
1289 #undef __FUNCT__
1290 #define __FUNCT__ "PetscPartitionerCreate_ParMetis"
1291 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1292 {
1293   PetscPartitioner_ParMetis *p;
1294   PetscErrorCode          ierr;
1295 
1296   PetscFunctionBegin;
1297   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1298   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1299   part->data = p;
1300 
1301   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1302   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1303   PetscFunctionReturn(0);
1304 }
1305 
1306 #undef __FUNCT__
1307 #define __FUNCT__ "DMPlexGetPartitioner"
1308 /*@
1309   DMPlexGetPartitioner - Get the mesh partitioner
1310 
1311   Not collective
1312 
1313   Input Parameter:
1314 . dm - The DM
1315 
1316   Output Parameter:
1317 . part - The PetscPartitioner
1318 
1319   Level: developer
1320 
1321   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1322 
1323 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1324 @*/
1325 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1326 {
1327   DM_Plex *mesh = (DM_Plex *) dm->data;
1328 
1329   PetscFunctionBegin;
1330   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1331   PetscValidPointer(part, 2);
1332   *part = mesh->partitioner;
1333   PetscFunctionReturn(0);
1334 }
1335 
1336 #undef __FUNCT__
1337 #define __FUNCT__ "DMPlexSetPartitioner"
1338 /*@
1339   DMPlexSetPartitioner - Set the mesh partitioner
1340 
1341   logically collective on dm and part
1342 
1343   Input Parameters:
1344 + dm - The DM
1345 - part - The partitioner
1346 
1347   Level: developer
1348 
1349   Note: Any existing PetscPartitioner will be destroyed.
1350 
1351 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1352 @*/
1353 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1354 {
1355   DM_Plex       *mesh = (DM_Plex *) dm->data;
1356   PetscErrorCode ierr;
1357 
1358   PetscFunctionBegin;
1359   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1360   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1361   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1362   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1363   mesh->partitioner = part;
1364   PetscFunctionReturn(0);
1365 }
1366 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelClosure_Tree"
/* Propagate a partition-label value through the mesh tree structure around a point.

   When up is true, the transitive closure of the point's tree parent is labeled with rank,
   recursing upward through further ancestors.  When down is true, the point's tree children
   are labeled with rank, recursing downward.  This gives tree-related points the same rank
   value as the original point. */
static PetscErrorCode DMPlexPartitionLabelClosure_Tree(DM dm, DMLabel label, PetscInt rank, PetscInt point, PetscBool up, PetscBool down)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (up) {
    PetscInt parent;

    ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
    if (parent != point) { /* a point with no parent is its own parent */
      PetscInt closureSize, *closure = NULL, i;

      ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
      for (i = 0; i < closureSize; i++) {
        PetscInt cpoint = closure[2*i]; /* closure entries are (point, orientation) pairs */

        ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
        /* Recurse upward only: the closure already covered the downward direction here */
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
      }
      ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
    }
  }
  if (down) {
    PetscInt numChildren;
    const PetscInt *children;

    ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
    if (numChildren) {
      PetscInt i;

      for (i = 0; i < numChildren; i++) {
        PetscInt cpoint = children[i];

        ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
        /* Recurse downward only */
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}
1409 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelClosure"
/*@
  DMPlexPartitionLabelClosure - Add the closure of all points to the partition label

  Input Parameters:
+ dm     - The DM
- label  - DMLabel assigning ranks to remote roots

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, c, closureSize;
  PetscInt       *closure = NULL;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* Visit each rank value present in the label */
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];

    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* Label every point in the transitive closure with the same rank */
      ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      for (c = 0; c < closureSize*2; c += 2) { /* entries are (point, orientation) pairs */
        ierr = DMLabelSetValue(label, closure[c], rank);CHKERRQ(ierr);
        /* Also pull in tree parents and children of each closure point */
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,closure[c],PETSC_TRUE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  /* NOTE(review): the closure array is returned once with point 0; presumably only the
     array argument matters to the restore call -- confirm against DMPlexRestoreTransitiveClosure */
  if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);}
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1456 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelAdjacency"
/*@
  DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label

  Input Parameters:
+ dm     - The DM
- label  - DMLabel assigning ranks to remote roots

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, a, adjSize;
  PetscInt       *adj = NULL;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* Visit each rank value present in the label */
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];

    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* PETSC_DETERMINE asks DMPlexGetAdjacency to (re)size the adj array as needed */
      adjSize = PETSC_DETERMINE;
      ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
      for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  /* The adjacency work array is allocated by DMPlexGetAdjacency and freed once here */
  ierr = PetscFree(adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1501 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelInvert"
/*@
  DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label

  Input Parameters:
+ dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots

  Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
  resulting leafLabel is a receiver mapping of remote roots to their parent rank.

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, numProcs;
  PetscInt           p, n, numNeighbors, size, l, nleaves;
  PetscSF            sfPoint;
  PetscSFNode       *rootPoints, *leafPoints;
  PetscSection       rootSection, leafSection;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners */
  /* rootSection counts, per target rank, how many points this rank will send */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, numProcs);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(size, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* If the point is a leaf in the point SF, record its true (remote) owner;
         otherwise this rank owns it */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Filter remote contributions (ovLeafPoints) into the overlapSF */
  /* Each received (point, rank) pair becomes an entry of leafLabel */
  ierr = PetscSectionGetStorageSize(leafSection, &size);CHKERRQ(ierr);
  for (p = 0; p < size; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1591 
1592 #undef __FUNCT__
1593 #define __FUNCT__ "DMPlexPartitionLabelCreateSF"
1594 /*@
1595   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
1596 
1597   Input Parameters:
+ dm    - The DM
- label - DMLabel assigning ranks to remote roots

  Output Parameter:
. sf    - The star forest communication context encapsulating the defined mapping
1603 
1604   Note: The incoming label is a receiver mapping of remote points to their parent rank.
1605 
1606   Level: developer
1607 
1608 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
1609 @*/
1610 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
1611 {
1612   PetscMPIInt     rank, numProcs;
1613   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
1614   PetscSFNode    *remotePoints;
1615   IS              remoteRootIS;
1616   const PetscInt *remoteRoots;
1617   PetscErrorCode ierr;
1618 
1619   PetscFunctionBegin;
1620   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1621   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
1622 
1623   for (numRemote = 0, n = 0; n < numProcs; ++n) {
1624     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1625     numRemote += numPoints;
1626   }
1627   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
1628   /* Put owned points first */
1629   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
1630   if (numPoints > 0) {
1631     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
1632     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1633     for (p = 0; p < numPoints; p++) {
1634       remotePoints[idx].index = remoteRoots[p];
1635       remotePoints[idx].rank = rank;
1636       idx++;
1637     }
1638     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1639     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1640   }
1641   /* Now add remote points */
1642   for (n = 0; n < numProcs; ++n) {
1643     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1644     if (numPoints <= 0 || n == rank) continue;
1645     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
1646     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1647     for (p = 0; p < numPoints; p++) {
1648       remotePoints[idx].index = remoteRoots[p];
1649       remotePoints[idx].rank = n;
1650       idx++;
1651     }
1652     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1653     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1654   }
1655   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
1656   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1657   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1658   PetscFunctionReturn(0);
1659 }
1660