xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 555a9cf8977bc36650a8aebd5df49b86389a659d)
1 #include <petsc-private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
3 PetscClassId PETSCPARTITIONER_CLASSID = 0;
4 
5 PetscFunctionList PetscPartitionerList              = NULL;
6 PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
7 
8 PetscBool ChacoPartitionercite = PETSC_FALSE;
/* BibTeX citation text for the Chaco graph partitioner.
   Every fragment must end in \n so the assembled entry is one field per line;
   the booktitle fragment was missing its terminator. */
const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
                               "  author    = {Bruce Hendrickson and Robert Leland},\n"
                               "  title     = {A multilevel algorithm for partitioning graphs},\n"
                               "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},\n"
                               "  isbn      = {0-89791-816-9},\n"
                               "  pages     = {28},\n"
                               "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
                               "  publisher = {ACM Press},\n"
                               "  address   = {New York},\n"
                               "  year      = {1995}\n}\n";
19 
20 PetscBool ParMetisPartitionercite = PETSC_FALSE;
21 const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
22                                "  author  = {George Karypis and Vipin Kumar},\n"
23                                "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
24                                "  journal = {Journal of Parallel and Distributed Computing},\n"
25                                "  volume  = {48},\n"
26                                "  pages   = {71--85},\n"
27                                "  year    = {1998}\n}\n";
28 
29 #undef __FUNCT__
30 #define __FUNCT__ "DMPlexCreatePartitionerGraph"
31 /*@C
32   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
33 
34   Input Parameters:
+ dm      - The mesh DM
36 - height  - Height of the strata from which to construct the graph
37 
  Output Parameters:
+ numVertices - Number of vertices in the graph
. offsets     - Point offsets in the graph
- adjacency   - Point connectivity in the graph
42 
  The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
  DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
  representation on the mesh.
46 
47   Level: developer
48 
49 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
50 @*/
PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
{
  PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
  PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
  IS             cellNumbering;
  const PetscInt *cellNum;
  PetscBool      useCone, useClosure;
  PetscSection   section;
  PetscSegBuffer adjBuffer;
  PetscSF        sfPoint;
  PetscMPIInt    rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* nroots > 0 means the point SF is populated; the code below treats that as
     "distributed mesh" and switches on global cell numbering */
  ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
  /* Build adjacency graph via a section/segbuffer */
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
  /* Always use FVM adjacency to create partitioner graph: save the user's
     adjacency flags, force cone-without-closure, and restore after the walk */
  ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
  if (nroots > 0) {
    ierr = DMPlexGetCellNumbering(dm, &cellNumbering);CHKERRQ(ierr);
    ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  }
  for (*numVertices = 0, p = pStart; p < pEnd; p++) {
    /* Skip non-owned cells in parallel (ParMetis expects no overlap); a
       negative entry in the cell numbering marks a cell owned elsewhere */
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    adjSize = PETSC_DETERMINE;
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      const PetscInt point = adj[a];
      /* Keep only neighbors in the same stratum, excluding the point itself */
      if (point != p && pStart <= point && point < pEnd) {
        PetscInt *PETSC_RESTRICT pBuf;
        ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
        ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
        *pBuf = point;
      }
    }
    (*numVertices)++;
  }
  /* Restore the adjacency definition the user had configured */
  ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
  /* Derive CSR graph from section/segbuffer */
  ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
  /* Compress offsets to owned cells only, mirroring the skip logic above */
  for (idx = 0, p = pStart; p < pEnd; p++) {
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
  }
  vOffsets[*numVertices] = size;
  /* NOTE(review): if offsets is NULL, vOffsets is never freed here -- verify callers always request it */
  if (offsets) *offsets = vOffsets;
  ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
  if (nroots > 0) {
    ISLocalToGlobalMapping ltogCells;
    PetscInt n, size, *cells_arr;
    /* In parallel, apply a global cell numbering to the graph */
    ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
    /* NOTE(review): const is cast away so the mapping's own index array can be
       rewritten in place below -- confirm no other user of this mapping needs
       the original (negative) values */
    ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
    /* Convert to positive global cell numbers */
    for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
    ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
  }
  /* NOTE(review): if adjacency is NULL, graph is never freed -- verify callers always request it */
  if (adjacency) *adjacency = graph;
  /* Clean up */
  ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
  ierr = PetscFree(adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
132 
133 #undef __FUNCT__
134 #define __FUNCT__ "DMPlexCreateNeighborCSR"
135 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
136 {
137   const PetscInt maxFaceCases = 30;
138   PetscInt       numFaceCases = 0;
139   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
140   PetscInt      *off, *adj;
141   PetscInt      *neighborCells = NULL;
142   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
143   PetscErrorCode ierr;
144 
145   PetscFunctionBegin;
146   /* For parallel partitioning, I think you have to communicate supports */
147   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
148   cellDim = dim - cellHeight;
149   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
150   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
151   if (cEnd - cStart == 0) {
152     if (numVertices) *numVertices = 0;
153     if (offsets)   *offsets   = NULL;
154     if (adjacency) *adjacency = NULL;
155     PetscFunctionReturn(0);
156   }
157   numCells  = cEnd - cStart;
158   faceDepth = depth - cellHeight;
159   if (dim == depth) {
160     PetscInt f, fStart, fEnd;
161 
162     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
163     /* Count neighboring cells */
164     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
165     for (f = fStart; f < fEnd; ++f) {
166       const PetscInt *support;
167       PetscInt        supportSize;
168       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
169       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
170       if (supportSize == 2) {
171         PetscInt numChildren;
172 
173         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
174         if (!numChildren) {
175           ++off[support[0]-cStart+1];
176           ++off[support[1]-cStart+1];
177         }
178       }
179     }
180     /* Prefix sum */
181     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
182     if (adjacency) {
183       PetscInt *tmp;
184 
185       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
186       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
187       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
188       /* Get neighboring cells */
189       for (f = fStart; f < fEnd; ++f) {
190         const PetscInt *support;
191         PetscInt        supportSize;
192         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
193         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
194         if (supportSize == 2) {
195           PetscInt numChildren;
196 
197           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
198           if (!numChildren) {
199             adj[tmp[support[0]-cStart]++] = support[1];
200             adj[tmp[support[1]-cStart]++] = support[0];
201           }
202         }
203       }
204 #if defined(PETSC_USE_DEBUG)
205       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
206 #endif
207       ierr = PetscFree(tmp);CHKERRQ(ierr);
208     }
209     if (numVertices) *numVertices = numCells;
210     if (offsets)   *offsets   = off;
211     if (adjacency) *adjacency = adj;
212     PetscFunctionReturn(0);
213   }
214   /* Setup face recognition */
215   if (faceDepth == 1) {
216     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
217 
218     for (c = cStart; c < cEnd; ++c) {
219       PetscInt corners;
220 
221       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
222       if (!cornersSeen[corners]) {
223         PetscInt nFV;
224 
225         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
226         cornersSeen[corners] = 1;
227 
228         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
229 
230         numFaceVertices[numFaceCases++] = nFV;
231       }
232     }
233   }
234   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
235   /* Count neighboring cells */
236   for (cell = cStart; cell < cEnd; ++cell) {
237     PetscInt numNeighbors = PETSC_DETERMINE, n;
238 
239     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
240     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
241     for (n = 0; n < numNeighbors; ++n) {
242       PetscInt        cellPair[2];
243       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
244       PetscInt        meetSize = 0;
245       const PetscInt *meet    = NULL;
246 
247       cellPair[0] = cell; cellPair[1] = neighborCells[n];
248       if (cellPair[0] == cellPair[1]) continue;
249       if (!found) {
250         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
251         if (meetSize) {
252           PetscInt f;
253 
254           for (f = 0; f < numFaceCases; ++f) {
255             if (numFaceVertices[f] == meetSize) {
256               found = PETSC_TRUE;
257               break;
258             }
259           }
260         }
261         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
262       }
263       if (found) ++off[cell-cStart+1];
264     }
265   }
266   /* Prefix sum */
267   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
268 
269   if (adjacency) {
270     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
271     /* Get neighboring cells */
272     for (cell = cStart; cell < cEnd; ++cell) {
273       PetscInt numNeighbors = PETSC_DETERMINE, n;
274       PetscInt cellOffset   = 0;
275 
276       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
277       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
278       for (n = 0; n < numNeighbors; ++n) {
279         PetscInt        cellPair[2];
280         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
281         PetscInt        meetSize = 0;
282         const PetscInt *meet    = NULL;
283 
284         cellPair[0] = cell; cellPair[1] = neighborCells[n];
285         if (cellPair[0] == cellPair[1]) continue;
286         if (!found) {
287           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
288           if (meetSize) {
289             PetscInt f;
290 
291             for (f = 0; f < numFaceCases; ++f) {
292               if (numFaceVertices[f] == meetSize) {
293                 found = PETSC_TRUE;
294                 break;
295               }
296             }
297           }
298           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
299         }
300         if (found) {
301           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
302           ++cellOffset;
303         }
304       }
305     }
306   }
307   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
308   if (numVertices) *numVertices = numCells;
309   if (offsets)   *offsets   = off;
310   if (adjacency) *adjacency = adj;
311   PetscFunctionReturn(0);
312 }
313 
314 #undef __FUNCT__
315 #define __FUNCT__ "PetscPartitionerRegister"
316 /*@C
317   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
318 
319   Not Collective
320 
321   Input Parameters:
322 + name        - The name of a new user-defined creation routine
323 - create_func - The creation routine itself
324 
325   Notes:
326   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
327 
328   Sample usage:
329 .vb
330     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
331 .ve
332 
333   Then, your PetscPartitioner type can be chosen with the procedural interface via
334 .vb
335     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
336     PetscPartitionerSetType(PetscPartitioner, "my_part");
337 .ve
338    or at runtime via the option
339 .vb
340     -petscpartitioner_type my_part
341 .ve
342 
343   Level: advanced
344 
345 .keywords: PetscPartitioner, register
346 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
347 
348 @*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Record the constructor under the given name in the global type registry */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
357 
358 #undef __FUNCT__
359 #define __FUNCT__ "PetscPartitionerSetType"
360 /*@C
361   PetscPartitionerSetType - Builds a particular PetscPartitioner
362 
363   Collective on PetscPartitioner
364 
365   Input Parameters:
366 + part - The PetscPartitioner object
367 - name - The kind of partitioner
368 
369   Options Database Key:
370 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
371 
372   Level: intermediate
373 
374 .keywords: PetscPartitioner, set, type
375 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
376 @*/
377 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
378 {
379   PetscErrorCode (*r)(PetscPartitioner);
380   PetscBool      match;
381   PetscErrorCode ierr;
382 
383   PetscFunctionBegin;
384   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
385   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
386   if (match) PetscFunctionReturn(0);
387 
388   if (!PetscPartitionerRegisterAllCalled) {ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);}
389   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
390   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
391 
392   if (part->ops->destroy) {
393     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
394     part->ops->destroy = NULL;
395   }
396   ierr = (*r)(part);CHKERRQ(ierr);
397   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
398   PetscFunctionReturn(0);
399 }
400 
401 #undef __FUNCT__
402 #define __FUNCT__ "PetscPartitionerGetType"
403 /*@C
404   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
405 
406   Not Collective
407 
408   Input Parameter:
409 . part - The PetscPartitioner
410 
411   Output Parameter:
412 . name - The PetscPartitioner type name
413 
414   Level: intermediate
415 
416 .keywords: PetscPartitioner, get, type, name
417 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
418 @*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  /* Register the built-in types first so the registry is complete */
  if (!PetscPartitionerRegisterAllCalled) {ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);}
  /* The caller receives a borrowed pointer to the type name, not a copy */
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
430 
431 #undef __FUNCT__
432 #define __FUNCT__ "PetscPartitionerView"
433 /*@C
434   PetscPartitionerView - Views a PetscPartitioner
435 
436   Collective on PetscPartitioner
437 
  Input Parameters:
+ part - the PetscPartitioner object to view
- v    - the viewer
441 
442   Level: developer
443 
444 .seealso: PetscPartitionerDestroy()
445 @*/
PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Default to the stdout ASCII viewer on the object's communicator */
  if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
  /* Delegate to the implementation's view routine, if one is provided */
  if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
456 
457 #undef __FUNCT__
458 #define __FUNCT__ "PetscPartitionerViewFromOptions"
459 /*
460   PetscPartitionerViewFromOptions - Processes command line options to determine if/how a PetscPartitioner is to be viewed.
461 
462   Collective on PetscPartitioner
463 
464   Input Parameters:
465 + part   - the PetscPartitioner
466 . prefix - prefix to use for viewing, or NULL
467 - optionname - option to activate viewing
468 
469   Level: intermediate
470 
471 .keywords: PetscPartitioner, view, options, database
472 .seealso: VecViewFromOptions(), MatViewFromOptions()
473 */
474 PetscErrorCode PetscPartitionerViewFromOptions(PetscPartitioner part, const char prefix[], const char optionname[])
475 {
476   PetscViewer       viewer;
477   PetscViewerFormat format;
478   PetscBool         flg;
479   PetscErrorCode    ierr;
480 
481   PetscFunctionBegin;
482   if (prefix) {ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject) part), prefix,                       optionname, &viewer, &format, &flg);CHKERRQ(ierr);}
483   else        {ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject) part), ((PetscObject) part)->prefix, optionname, &viewer, &format, &flg);CHKERRQ(ierr);}
484   if (flg) {
485     ierr = PetscViewerPushFormat(viewer, format);CHKERRQ(ierr);
486     ierr = PetscPartitionerView(part, viewer);CHKERRQ(ierr);
487     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
488     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
489   }
490   PetscFunctionReturn(0);
491 }
492 #undef __FUNCT__
493 #define __FUNCT__ "PetscPartitionerSetTypeFromOptions_Internal"
494 PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
495 {
496   const char    *defaultType;
497   char           name[256];
498   PetscBool      flg;
499   PetscErrorCode ierr;
500 
501   PetscFunctionBegin;
502   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
503   if (!((PetscObject) part)->type_name) defaultType = PETSCPARTITIONERCHACO;
504   else                                  defaultType = ((PetscObject) part)->type_name;
505   if (!PetscPartitionerRegisterAllCalled) {ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);}
506 
507   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
508   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);CHKERRQ(ierr);
509   if (flg) {
510     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
511   } else if (!((PetscObject) part)->type_name) {
512     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
513   }
514   ierr = PetscOptionsEnd();CHKERRQ(ierr);
515   PetscFunctionReturn(0);
516 }
517 
518 #undef __FUNCT__
519 #define __FUNCT__ "PetscPartitionerSetFromOptions"
520 /*@
521   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
522 
523   Collective on PetscPartitioner
524 
525   Input Parameter:
526 . part - the PetscPartitioner object to set options for
527 
528   Level: developer
529 
530 .seealso: PetscPartitionerView()
531 @*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Resolve the type first so ops->setfromoptions below is populated */
  ierr = PetscPartitionerSetTypeFromOptions_Internal(part);CHKERRQ(ierr);

  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  if (part->ops->setfromoptions) {ierr = (*part->ops->setfromoptions)(part);CHKERRQ(ierr);}
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers((PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
548 
549 #undef __FUNCT__
550 #define __FUNCT__ "PetscPartitionerSetUp"
551 /*@C
552   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
553 
554   Collective on PetscPartitioner
555 
556   Input Parameter:
557 . part - the PetscPartitioner object to setup
558 
559   Level: developer
560 
561 .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
562 @*/
PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Delegate setup to the implementation; a no-op when none is provided */
  if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
572 
573 #undef __FUNCT__
574 #define __FUNCT__ "PetscPartitionerDestroy"
575 /*@
576   PetscPartitionerDestroy - Destroys a PetscPartitioner object
577 
578   Collective on PetscPartitioner
579 
580   Input Parameter:
581 . part - the PetscPartitioner object to destroy
582 
583   Level: developer
584 
585 .seealso: PetscPartitionerView()
586 @*/
587 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
588 {
589   PetscErrorCode ierr;
590 
591   PetscFunctionBegin;
592   if (!*part) PetscFunctionReturn(0);
593   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
594 
595   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
596   ((PetscObject) (*part))->refct = 0;
597 
598   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
599   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
600   PetscFunctionReturn(0);
601 }
602 
603 #undef __FUNCT__
604 #define __FUNCT__ "PetscPartitionerCreate"
605 /*@
606   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
607 
608   Collective on MPI_Comm
609 
610   Input Parameter:
611 . comm - The communicator for the PetscPartitioner object
612 
613   Output Parameter:
614 . part - The PetscPartitioner object
615 
616   Level: beginner
617 
618 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE
619 @*/
620 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
621 {
622   PetscPartitioner p;
623   PetscErrorCode   ierr;
624 
625   PetscFunctionBegin;
626   PetscValidPointer(part, 2);
627   *part = NULL;
628   ierr = PetscFVInitializePackage();CHKERRQ(ierr);
629 
630   ierr = PetscHeaderCreate(p, _p_PetscPartitioner, struct _PetscPartitionerOps, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
631   ierr = PetscMemzero(p->ops, sizeof(struct _PetscPartitionerOps));CHKERRQ(ierr);
632 
633   *part = p;
634   PetscFunctionReturn(0);
635 }
636 
637 #undef __FUNCT__
638 #define __FUNCT__ "PetscPartitionerPartition"
639 /*@
640   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
641 
642   Collective on DM
643 
644   Input Parameters:
645 + part    - The PetscPartitioner
646 - dm      - The mesh DM
647 
648   Output Parameters:
649 + partSection     - The PetscSection giving the division of points by partition
650 - partition       - The list of points by partition
651 
652   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
653 
654   Level: developer
655 
656 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
657 @*/
658 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
659 {
660   PetscMPIInt    size;
661   PetscErrorCode ierr;
662 
663   PetscFunctionBegin;
664   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
665   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
666   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
667   PetscValidPointer(partition, 5);
668   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
669   if (size == 1) {
670     PetscInt *points;
671     PetscInt  cStart, cEnd, c;
672 
673     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
674     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
675     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
676     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
677     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
678     for (c = cStart; c < cEnd; ++c) points[c] = c;
679     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
680     PetscFunctionReturn(0);
681   }
682   if (part->height == 0) {
683     PetscInt  numVertices;
684     PetscInt *start     = NULL;
685     PetscInt *adjacency = NULL;
686 
687     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency);CHKERRQ(ierr);
688     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
689     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
690     ierr = PetscFree(start);CHKERRQ(ierr);
691     ierr = PetscFree(adjacency);CHKERRQ(ierr);
692   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
693   PetscFunctionReturn(0);
694 }
695 
696 #undef __FUNCT__
697 #define __FUNCT__ "PetscPartitionerDestroy_Shell"
698 PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
699 {
700   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
701   PetscErrorCode          ierr;
702 
703   PetscFunctionBegin;
704   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
705   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
706   ierr = PetscFree(p);CHKERRQ(ierr);
707   PetscFunctionReturn(0);
708 }
709 
710 #undef __FUNCT__
711 #define __FUNCT__ "PetscPartitionerView_Shell_Ascii"
PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscViewerFormat format;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  /* NOTE(review): format is fetched but never consulted -- every format
     produces the same one-line output */
  ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
722 
723 #undef __FUNCT__
724 #define __FUNCT__ "PetscPartitionerView_Shell"
725 PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
726 {
727   PetscBool      iascii;
728   PetscErrorCode ierr;
729 
730   PetscFunctionBegin;
731   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
732   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
733   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
734   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
735   PetscFunctionReturn(0);
736 }
737 
738 #undef __FUNCT__
739 #define __FUNCT__ "PetscPartitionerPartition_Shell"
PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscInt                np;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  /* The stored section's chart end gives the configured partition count */
  ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
  if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
  ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
  if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
  /* Return the preset partition; the graph inputs (start/adjacency) are ignored */
  ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
  *partition = p->partition;
  /* The caller receives its own reference to the stored IS */
  ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
756 
757 #undef __FUNCT__
758 #define __FUNCT__ "PetscPartitionerInitialize_Shell"
PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
{
  PetscFunctionBegin;
  /* Install the shell implementation's function table */
  part->ops->view      = PetscPartitionerView_Shell;
  part->ops->destroy   = PetscPartitionerDestroy_Shell;
  part->ops->partition = PetscPartitionerPartition_Shell;
  PetscFunctionReturn(0);
}
767 
768 /*MC
769   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
770 
771   Level: intermediate
772 
773 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
774 M*/
775 
776 #undef __FUNCT__
777 #define __FUNCT__ "PetscPartitionerCreate_Shell"
778 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
779 {
780   PetscPartitioner_Shell *p;
781   PetscErrorCode          ierr;
782 
783   PetscFunctionBegin;
784   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
785   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
786   part->data = p;
787 
788   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
789   PetscFunctionReturn(0);
790 }
791 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerShellSetPartition"
/*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh

  Collective on PART

  Input Parameters:
+ part     - The PetscPartitioner
. numProcs - The number of partitions
. sizes    - array of size numProcs (or NULL) providing the number of points in each partition
- points   - array of size sum(sizes) (may be NULL iff sizes is NULL) providing the points for
             partition 0, followed by those for partition 1, and so on

  Level: developer

  Notes:

    It is safe to free the sizes and points arrays after use in this routine.

.seealso DMPlexDistribute(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt numProcs, const PetscInt sizes[], const PetscInt points[])
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscInt                proc, numPoints;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* points must be given whenever sizes is given */
  if (sizes) {PetscValidPointer(sizes, 3);}
  if (sizes) {PetscValidPointer(points, 4);}
  /* Discard any previously configured partition */
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(p->section, 0, numProcs);CHKERRQ(ierr);
  if (sizes) {
    for (proc = 0; proc < numProcs; ++proc) {
      ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
    }
  }
  ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
  /* If sizes was NULL all dofs are zero, so numPoints is 0 and points may be NULL */
  ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
  /* Copy the points so the caller may free its arrays immediately */
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
837 
838 #undef __FUNCT__
839 #define __FUNCT__ "PetscPartitionerDestroy_Simple"
840 PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
841 {
842   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
843   PetscErrorCode          ierr;
844 
845   PetscFunctionBegin;
846   ierr = PetscFree(p);CHKERRQ(ierr);
847   PetscFunctionReturn(0);
848 }
849 
850 #undef __FUNCT__
851 #define __FUNCT__ "PetscPartitionerView_Simple_Ascii"
852 PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
853 {
854   PetscViewerFormat format;
855   PetscErrorCode    ierr;
856 
857   PetscFunctionBegin;
858   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
859   ierr = PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");CHKERRQ(ierr);
860   PetscFunctionReturn(0);
861 }
862 
863 #undef __FUNCT__
864 #define __FUNCT__ "PetscPartitionerView_Simple"
865 PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
866 {
867   PetscBool      iascii;
868   PetscErrorCode ierr;
869 
870   PetscFunctionBegin;
871   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
872   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
873   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
874   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
875   PetscFunctionReturn(0);
876 }
877 
878 #undef __FUNCT__
879 #define __FUNCT__ "PetscPartitionerPartition_Simple"
880 PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
881 {
882   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
883   PetscInt                 np;
884   PetscErrorCode           ierr;
885 
886   PetscFunctionBegin;
887   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
888   for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
889   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
890   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
891   PetscFunctionReturn(0);
892 }
893 
894 #undef __FUNCT__
895 #define __FUNCT__ "PetscPartitionerInitialize_Simple"
896 PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
897 {
898   PetscFunctionBegin;
899   part->ops->view      = PetscPartitionerView_Simple;
900   part->ops->destroy   = PetscPartitionerDestroy_Simple;
901   part->ops->partition = PetscPartitionerPartition_Simple;
902   PetscFunctionReturn(0);
903 }
904 
905 /*MC
906   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
907 
908   Level: intermediate
909 
910 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
911 M*/
912 
913 #undef __FUNCT__
914 #define __FUNCT__ "PetscPartitionerCreate_Simple"
915 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
916 {
917   PetscPartitioner_Simple *p;
918   PetscErrorCode           ierr;
919 
920   PetscFunctionBegin;
921   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
922   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
923   part->data = p;
924 
925   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
926   PetscFunctionReturn(0);
927 }
928 
929 #undef __FUNCT__
930 #define __FUNCT__ "PetscPartitionerDestroy_Chaco"
931 PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
932 {
933   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
934   PetscErrorCode          ierr;
935 
936   PetscFunctionBegin;
937   ierr = PetscFree(p);CHKERRQ(ierr);
938   PetscFunctionReturn(0);
939 }
940 
941 #undef __FUNCT__
942 #define __FUNCT__ "PetscPartitionerView_Chaco_Ascii"
943 PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
944 {
945   PetscViewerFormat format;
946   PetscErrorCode    ierr;
947 
948   PetscFunctionBegin;
949   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
950   ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
951   PetscFunctionReturn(0);
952 }
953 
954 #undef __FUNCT__
955 #define __FUNCT__ "PetscPartitionerView_Chaco"
956 PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
957 {
958   PetscBool      iascii;
959   PetscErrorCode ierr;
960 
961   PetscFunctionBegin;
962   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
963   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
964   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
965   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
966   PetscFunctionReturn(0);
967 }
968 
969 #if defined(PETSC_HAVE_CHACO)
970 #if defined(PETSC_HAVE_UNISTD_H)
971 #include <unistd.h>
972 #endif
973 /* Chaco does not have an include file */
974 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
975                        float *ewgts, float *x, float *y, float *z, char *outassignname,
976                        char *outfilename, short *assignment, int architecture, int ndims_tot,
977                        int mesh_dims[3], double *goal, int global_method, int local_method,
978                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
979 
980 extern int FREE_GRAPH;
981 #endif
982 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerPartition_Chaco"
/* Partition the graph in CSR form (start, adjacency) into nparts pieces using the
   Chaco library.  On exit, partSection holds the number of vertices assigned to
   each partition and *partition is an IS listing the vertices of partition 0,
   then partition 1, and so on.  adjacency[] is temporarily converted to 1-based
   numbering for Chaco and restored before returning. */
PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
  short int     *assignment;              /* Output partition */
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  if (!numVertices) {
    /* Empty local graph: produce an empty section and an empty partition IS */
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco expects 1-based vertex numbers; shift the adjacency in place (undone below) */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  /* Partition onto an nparts x 1 x 1 processor mesh */
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    /* Restore stdout and capture whatever Chaco printed; on failure the captured
       text is embedded in the error message.
       NOTE(review): a single read() may return less than Chaco's full output if
       it exceeded the pipe buffer -- TODO confirm this is acceptable */
    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  /* Group the vertices by their assigned partition */
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift applied above */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1088 
1089 #undef __FUNCT__
1090 #define __FUNCT__ "PetscPartitionerInitialize_Chaco"
1091 PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1092 {
1093   PetscFunctionBegin;
1094   part->ops->view      = PetscPartitionerView_Chaco;
1095   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1096   part->ops->partition = PetscPartitionerPartition_Chaco;
1097   PetscFunctionReturn(0);
1098 }
1099 
1100 /*MC
1101   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1102 
1103   Level: intermediate
1104 
1105 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1106 M*/
1107 
1108 #undef __FUNCT__
1109 #define __FUNCT__ "PetscPartitionerCreate_Chaco"
1110 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1111 {
1112   PetscPartitioner_Chaco *p;
1113   PetscErrorCode          ierr;
1114 
1115   PetscFunctionBegin;
1116   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1117   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1118   part->data = p;
1119 
1120   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1121   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1122   PetscFunctionReturn(0);
1123 }
1124 
1125 #undef __FUNCT__
1126 #define __FUNCT__ "PetscPartitionerDestroy_ParMetis"
1127 PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1128 {
1129   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1130   PetscErrorCode             ierr;
1131 
1132   PetscFunctionBegin;
1133   ierr = PetscFree(p);CHKERRQ(ierr);
1134   PetscFunctionReturn(0);
1135 }
1136 
1137 #undef __FUNCT__
1138 #define __FUNCT__ "PetscPartitionerView_ParMetis_Ascii"
1139 PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1140 {
1141   PetscViewerFormat format;
1142   PetscErrorCode    ierr;
1143 
1144   PetscFunctionBegin;
1145   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1146   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
1147   PetscFunctionReturn(0);
1148 }
1149 
1150 #undef __FUNCT__
1151 #define __FUNCT__ "PetscPartitionerView_ParMetis"
1152 PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1153 {
1154   PetscBool      iascii;
1155   PetscErrorCode ierr;
1156 
1157   PetscFunctionBegin;
1158   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1159   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1160   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1161   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1162   PetscFunctionReturn(0);
1163 }
1164 
1165 #if defined(PETSC_HAVE_PARMETIS)
1166 #include <parmetis.h>
1167 #endif
1168 
1169 #undef __FUNCT__
1170 #define __FUNCT__ "PetscPartitionerPartition_ParMetis"
1171 PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1172 {
1173 #if defined(PETSC_HAVE_PARMETIS)
1174   MPI_Comm       comm;
1175   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1176   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1177   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1178   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1179   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1180   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1181   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1182   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1183   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1184   PetscReal     *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1185   PetscReal     *ubvec;                    /* The balance intolerance for vertex weights */
1186   PetscInt       options[5];               /* Options */
1187   /* Outputs */
1188   PetscInt       edgeCut;                  /* The number of edges cut by the partition */
1189   PetscInt      *assignment, *points;
1190   PetscMPIInt    rank, p, v, i;
1191   PetscErrorCode ierr;
1192 
1193   PetscFunctionBegin;
1194   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1195   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1196   options[0] = 0; /* Use all defaults */
1197   /* Calculate vertex distribution */
1198   ierr = PetscMalloc4(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment);CHKERRQ(ierr);
1199   vtxdist[0] = 0;
1200   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1201   for (p = 2; p <= nparts; ++p) {
1202     vtxdist[p] += vtxdist[p-1];
1203   }
1204   /* Calculate weights */
1205   for (p = 0; p < nparts; ++p) {
1206     tpwgts[p] = 1.0/nparts;
1207   }
1208   ubvec[0] = 1.05;
1209 
1210   if (nparts == 1) {
1211     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));
1212   } else {
1213     if (vtxdist[1] == vtxdist[nparts]) {
1214       if (!rank) {
1215         PetscStackPush("METIS_PartGraphKway");
1216         ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
1217         PetscStackPop;
1218         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1219       }
1220     } else {
1221       PetscStackPush("ParMETIS_V3_PartKway");
1222       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1223       PetscStackPop;
1224       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
1225     }
1226   }
1227   /* Convert to PetscSection+IS */
1228   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1229   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1230   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1231   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1232   for (p = 0, i = 0; p < nparts; ++p) {
1233     for (v = 0; v < nvtxs; ++v) {
1234       if (assignment[v] == p) points[i++] = v;
1235     }
1236   }
1237   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1238   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1239   ierr = PetscFree4(vtxdist,tpwgts,ubvec,assignment);CHKERRQ(ierr);
1240 #else
1241   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1242 #endif
1243   PetscFunctionReturn(0);
1244 }
1245 
1246 #undef __FUNCT__
1247 #define __FUNCT__ "PetscPartitionerInitialize_ParMetis"
1248 PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1249 {
1250   PetscFunctionBegin;
1251   part->ops->view      = PetscPartitionerView_ParMetis;
1252   part->ops->destroy   = PetscPartitionerDestroy_ParMetis;
1253   part->ops->partition = PetscPartitionerPartition_ParMetis;
1254   PetscFunctionReturn(0);
1255 }
1256 
1257 /*MC
1258   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1259 
1260   Level: intermediate
1261 
1262 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1263 M*/
1264 
1265 #undef __FUNCT__
1266 #define __FUNCT__ "PetscPartitionerCreate_ParMetis"
1267 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1268 {
1269   PetscPartitioner_ParMetis *p;
1270   PetscErrorCode          ierr;
1271 
1272   PetscFunctionBegin;
1273   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1274   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1275   part->data = p;
1276 
1277   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1278   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1279   PetscFunctionReturn(0);
1280 }
1281 
1282 #undef __FUNCT__
1283 #define __FUNCT__ "DMPlexGetPartitioner"
1284 /*@
1285   DMPlexGetPartitioner - Get the mesh partitioner
1286 
1287   Not collective
1288 
1289   Input Parameter:
1290 . dm - The DM
1291 
1292   Output Parameter:
1293 . part - The PetscPartitioner
1294 
1295   Level: developer
1296 
1297   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1298 
1299 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1300 @*/
1301 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1302 {
1303   DM_Plex *mesh = (DM_Plex *) dm->data;
1304 
1305   PetscFunctionBegin;
1306   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1307   PetscValidPointer(part, 2);
1308   *part = mesh->partitioner;
1309   PetscFunctionReturn(0);
1310 }
1311 
1312 #undef __FUNCT__
1313 #define __FUNCT__ "DMPlexSetPartitioner"
1314 /*@
1315   DMPlexSetPartitioner - Set the mesh partitioner
1316 
1317   logically collective on dm and part
1318 
1319   Input Parameters:
1320 + dm - The DM
1321 - part - The partitioner
1322 
1323   Level: developer
1324 
1325   Note: Any existing PetscPartitioner will be destroyed.
1326 
1327 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1328 @*/
1329 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1330 {
1331   DM_Plex       *mesh = (DM_Plex *) dm->data;
1332   PetscErrorCode ierr;
1333 
1334   PetscFunctionBegin;
1335   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1336   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1337   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1338   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1339   mesh->partitioner = part;
1340   PetscFunctionReturn(0);
1341 }
1342 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelClosure"
/*@
  DMPlexPartitionLabelClosure - Add the closure of all points to the partition label

  Input Parameters:
+ dm     - The DM
- label  - DMLabel assigning ranks to remote roots

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, c, closureSize;
  PetscInt       *closure = NULL;  /* work array reused across all closure queries */
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* Iterate over every rank present in the label */
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];

    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* Mark every point in the transitive closure with this rank; the closure
         array alternates (point, orientation), hence the stride-2 walk */
      ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      for (c = 0; c < closureSize*2; c += 2) {ierr = DMLabelSetValue(label, closure[c], rank);CHKERRQ(ierr);}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  /* Return the reused work array; the point argument (0) is irrelevant here */
  if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);}
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1386 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelAdjacency"
/*@
  DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label

  Input Parameters:
+ dm     - The DM
- label  - DMLabel assigning ranks to remote roots

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, a, adjSize;
  PetscInt       *adj = NULL;  /* work array (re)allocated by DMPlexGetAdjacency() */
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* Iterate over every rank present in the label */
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];

    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* PETSC_DETERMINE lets DMPlexGetAdjacency() size the adj array itself */
      adjSize = PETSC_DETERMINE;
      ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
      for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  ierr = PetscFree(adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1431 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelInvert"
/*@
  DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label

  Input Parameters:
+ dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots

  Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
  resulting leafLabel is a receiver mapping of remote roots to their parent rank.

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, numProcs;
  PetscInt           p, n, numNeighbors, size, l, nleaves;
  PetscSF            sfPoint;
  PetscSFNode       *rootPoints, *leafPoints;
  PetscSection       rootSection, leafSection;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners */
  /* rootSection counts, per target rank, how many points this process sends */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, numProcs);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(size, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* Record each point as (index, rank) of its actual owner: if the point is a
         leaf in the point SF use the remote (owner) copy, otherwise it is owned locally */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Filter remote contributions (ovLeafPoints) into the overlapSF */
  ierr = PetscSectionGetStorageSize(leafSection, &size);CHKERRQ(ierr);
  for (p = 0; p < size; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1521 
1522 #undef __FUNCT__
1523 #define __FUNCT__ "DMPlexPartitionLabelCreateSF"
1524 /*@
1525   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
1526 
1527   Input Parameters:
+ dm    - The DM
- label - DMLabel assigning ranks to remote roots

  Output Parameter:
. sf    - The star forest communication context encapsulating the defined mapping
1533 
1534   Note: The incoming label is a receiver mapping of remote points to their parent rank.
1535 
1536   Level: developer
1537 
.seealso: DMPlexDistribute(), DMPlexCreateOverlap()
1539 @*/
1540 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
1541 {
1542   PetscMPIInt     rank, numProcs;
1543   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
1544   PetscSFNode    *remotePoints;
1545   IS              remoteRootIS;
1546   const PetscInt *remoteRoots;
1547   PetscErrorCode ierr;
1548 
1549   PetscFunctionBegin;
1550   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1551   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
1552 
1553   for (numRemote = 0, n = 0; n < numProcs; ++n) {
1554     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1555     numRemote += numPoints;
1556   }
1557   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
1558   /* Put owned points first */
1559   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
1560   if (numPoints > 0) {
1561     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
1562     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1563     for (p = 0; p < numPoints; p++) {
1564       remotePoints[idx].index = remoteRoots[p];
1565       remotePoints[idx].rank = rank;
1566       idx++;
1567     }
1568     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1569     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1570   }
1571   /* Now add remote points */
1572   for (n = 0; n < numProcs; ++n) {
1573     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1574     if (numPoints <= 0 || n == rank) continue;
1575     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
1576     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1577     for (p = 0; p < numPoints; p++) {
1578       remotePoints[idx].index = remoteRoots[p];
1579       remotePoints[idx].rank = n;
1580       idx++;
1581     }
1582     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1583     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1584   }
1585   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
1586   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1587   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1588   PetscFunctionReturn(0);
1589 }
1590