xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 95dccacacae8a8fc0b691f9b4fba69a249b61188)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
3 PetscClassId PETSCPARTITIONER_CLASSID = 0;
4 
5 PetscFunctionList PetscPartitionerList              = NULL;
6 PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
7 
8 PetscBool ChacoPartitionercite = PETSC_FALSE;
9 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
10                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
11                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
12                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
13                                "  isbn      = {0-89791-816-9},\n"
14                                "  pages     = {28},\n"
15                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
16                                "  publisher = {ACM Press},\n"
17                                "  address   = {New York},\n"
18                                "  year      = {1995}\n}\n";
19 
20 PetscBool ParMetisPartitionercite = PETSC_FALSE;
21 const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
22                                "  author  = {George Karypis and Vipin Kumar},\n"
23                                "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
24                                "  journal = {Journal of Parallel and Distributed Computing},\n"
25                                "  volume  = {48},\n"
26                                "  pages   = {71--85},\n"
27                                "  year    = {1998}\n}\n";
28 
#undef __FUNCT__
#define __FUNCT__ "DMPlexCreatePartitionerGraph"
/*@C
  DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner

  Input Parameters:
+ dm      - The mesh DM
- height  - Height of the strata from which to construct the graph

  Output Parameters:
+ numVertices - Number of vertices in the graph
. offsets     - Point offsets in the graph
- adjacency   - Point connectivity in the graph

  Notes:
  The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
  DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
  representation on the mesh. This routine always builds the graph with FVM-style adjacency
  (use cone, not closure), restoring the user's adjacency settings afterwards. In parallel, only
  locally owned cells become graph vertices and neighbors are expressed in a global cell numbering.

  Level: developer

.seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
@*/
PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
{
  PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
  PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
  IS             cellNumbering;
  const PetscInt *cellNum;
  PetscBool      useCone, useClosure;
  PetscSection   section;
  PetscSegBuffer adjBuffer;
  PetscSF        sfPoint;
  PetscMPIInt    rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* nroots > 0 is used below as the marker that a point SF exists, i.e. the mesh is distributed */
  ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
  /* Build adjacency graph via a section/segbuffer: the section counts neighbors per cell,
     the segbuffer accumulates the neighbor lists */
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
  /* Always use FVM adjacency to create partitioner graph; save the user's settings to restore later */
  ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
  ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);CHKERRQ(ierr);
  ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  for (*numVertices = 0, p = pStart; p < pEnd; p++) {
    /* Skip non-owned cells in parallel (ParMetis expects no overlap); non-owned cells
       carry a negative global number in cellNum */
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    adjSize = PETSC_DETERMINE;
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      const PetscInt point = adj[a];
      /* Record only true neighbors in the same stratum; drop the cell itself */
      if (point != p && pStart <= point && point < pEnd) {
        PetscInt *PETSC_RESTRICT pBuf;
        ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
        ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
        *pBuf = point;
      }
    }
    (*numVertices)++;
  }
  /* Restore the adjacency settings the user had configured */
  ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
  /* Derive CSR graph from section/segbuffer */
  ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
  for (idx = 0, p = pStart; p < pEnd; p++) {
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
  }
  vOffsets[*numVertices] = size;
  if (offsets) *offsets = vOffsets;
  ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
  {
    ISLocalToGlobalMapping ltogCells;
    /* NOTE(review): this 'size' shadows the outer 'size' (the CSR storage size) — legal but confusing */
    PetscInt n, size, *cells_arr;
    /* In parallel, apply a global cell numbering to the graph */
    ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
    /* Convert to positive global cell numbers.
       NOTE(review): this casts away const and mutates the mapping's own index array in place —
       relies on ISLocalToGlobalMapping internals; confirm this remains safe across PETSc versions */
    for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
    ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
    ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
  }
  if (adjacency) *adjacency = graph;
  /* Clean up */
  ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
  ierr = PetscFree(adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
131 
132 #undef __FUNCT__
133 #define __FUNCT__ "DMPlexCreateNeighborCSR"
134 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
135 {
136   const PetscInt maxFaceCases = 30;
137   PetscInt       numFaceCases = 0;
138   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
139   PetscInt      *off, *adj;
140   PetscInt      *neighborCells = NULL;
141   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
142   PetscErrorCode ierr;
143 
144   PetscFunctionBegin;
145   /* For parallel partitioning, I think you have to communicate supports */
146   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
147   cellDim = dim - cellHeight;
148   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
149   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
150   if (cEnd - cStart == 0) {
151     if (numVertices) *numVertices = 0;
152     if (offsets)   *offsets   = NULL;
153     if (adjacency) *adjacency = NULL;
154     PetscFunctionReturn(0);
155   }
156   numCells  = cEnd - cStart;
157   faceDepth = depth - cellHeight;
158   if (dim == depth) {
159     PetscInt f, fStart, fEnd;
160 
161     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
162     /* Count neighboring cells */
163     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
164     for (f = fStart; f < fEnd; ++f) {
165       const PetscInt *support;
166       PetscInt        supportSize;
167       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
168       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
169       if (supportSize == 2) {
170         PetscInt numChildren;
171 
172         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
173         if (!numChildren) {
174           ++off[support[0]-cStart+1];
175           ++off[support[1]-cStart+1];
176         }
177       }
178     }
179     /* Prefix sum */
180     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
181     if (adjacency) {
182       PetscInt *tmp;
183 
184       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
185       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
186       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
187       /* Get neighboring cells */
188       for (f = fStart; f < fEnd; ++f) {
189         const PetscInt *support;
190         PetscInt        supportSize;
191         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
192         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
193         if (supportSize == 2) {
194           PetscInt numChildren;
195 
196           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
197           if (!numChildren) {
198             adj[tmp[support[0]-cStart]++] = support[1];
199             adj[tmp[support[1]-cStart]++] = support[0];
200           }
201         }
202       }
203 #if defined(PETSC_USE_DEBUG)
204       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
205 #endif
206       ierr = PetscFree(tmp);CHKERRQ(ierr);
207     }
208     if (numVertices) *numVertices = numCells;
209     if (offsets)   *offsets   = off;
210     if (adjacency) *adjacency = adj;
211     PetscFunctionReturn(0);
212   }
213   /* Setup face recognition */
214   if (faceDepth == 1) {
215     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
216 
217     for (c = cStart; c < cEnd; ++c) {
218       PetscInt corners;
219 
220       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
221       if (!cornersSeen[corners]) {
222         PetscInt nFV;
223 
224         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
225         cornersSeen[corners] = 1;
226 
227         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
228 
229         numFaceVertices[numFaceCases++] = nFV;
230       }
231     }
232   }
233   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
234   /* Count neighboring cells */
235   for (cell = cStart; cell < cEnd; ++cell) {
236     PetscInt numNeighbors = PETSC_DETERMINE, n;
237 
238     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
239     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
240     for (n = 0; n < numNeighbors; ++n) {
241       PetscInt        cellPair[2];
242       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
243       PetscInt        meetSize = 0;
244       const PetscInt *meet    = NULL;
245 
246       cellPair[0] = cell; cellPair[1] = neighborCells[n];
247       if (cellPair[0] == cellPair[1]) continue;
248       if (!found) {
249         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
250         if (meetSize) {
251           PetscInt f;
252 
253           for (f = 0; f < numFaceCases; ++f) {
254             if (numFaceVertices[f] == meetSize) {
255               found = PETSC_TRUE;
256               break;
257             }
258           }
259         }
260         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
261       }
262       if (found) ++off[cell-cStart+1];
263     }
264   }
265   /* Prefix sum */
266   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
267 
268   if (adjacency) {
269     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
270     /* Get neighboring cells */
271     for (cell = cStart; cell < cEnd; ++cell) {
272       PetscInt numNeighbors = PETSC_DETERMINE, n;
273       PetscInt cellOffset   = 0;
274 
275       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
276       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
277       for (n = 0; n < numNeighbors; ++n) {
278         PetscInt        cellPair[2];
279         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
280         PetscInt        meetSize = 0;
281         const PetscInt *meet    = NULL;
282 
283         cellPair[0] = cell; cellPair[1] = neighborCells[n];
284         if (cellPair[0] == cellPair[1]) continue;
285         if (!found) {
286           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
287           if (meetSize) {
288             PetscInt f;
289 
290             for (f = 0; f < numFaceCases; ++f) {
291               if (numFaceVertices[f] == meetSize) {
292                 found = PETSC_TRUE;
293                 break;
294               }
295             }
296           }
297           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
298         }
299         if (found) {
300           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
301           ++cellOffset;
302         }
303       }
304     }
305   }
306   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
307   if (numVertices) *numVertices = numCells;
308   if (offsets)   *offsets   = off;
309   if (adjacency) *adjacency = adj;
310   PetscFunctionReturn(0);
311 }
312 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerRegister"
/*@C
  PetscPartitionerRegister - Adds a new PetscPartitioner implementation

  Not Collective

  Input Parameters:
+ name        - The name of a new user-defined creation routine
- create_func - The creation routine itself

  Notes:
  PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners

  Sample usage:
.vb
    PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
.ve

  Then, your PetscPartitioner type can be chosen with the procedural interface via
.vb
    PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
    PetscPartitionerSetType(PetscPartitioner, "my_part");
.ve
   or at runtime via the option
.vb
    -petscpartitioner_type my_part
.ve

  Level: advanced

.keywords: PetscPartitioner, register
.seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()

@*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Thin wrapper: adds (name, creation function) to the global implementation registry */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
356 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerSetType"
/*@C
  PetscPartitionerSetType - Builds a particular PetscPartitioner

  Collective on PetscPartitioner

  Input Parameters:
+ part - The PetscPartitioner object
- name - The kind of partitioner

  Options Database Key:
. -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types

  Level: intermediate

.keywords: PetscPartitioner, set, type
.seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
{
  PetscErrorCode (*r)(PetscPartitioner);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* No-op if the object already has the requested type */
  ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);

  /* Tear down any previous implementation before installing the new one.
     NOTE(review): only ops->destroy is cleared here; the other ops entries are expected
     to be overwritten by the new type's creation routine below */
  if (part->ops->destroy) {
    ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
    part->ops->destroy = NULL;
  }
  ierr = (*r)(part);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
399 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerGetType"
/*@C
  PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.

  Not Collective

  Input Parameter:
. part - The PetscPartitioner

  Output Parameter:
. name - The PetscPartitioner type name (borrowed reference; do not free)

  Level: intermediate

.keywords: PetscPartitioner, get, type, name
.seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  /* Ensure the registry is populated so the returned name is meaningful */
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
429 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerView"
/*@C
  PetscPartitionerView - Views a PetscPartitioner

  Collective on PetscPartitioner

  Input Parameters:
+ part - the PetscPartitioner object to view
- v    - the viewer (or NULL for the communicator's stdout viewer)

  Level: developer

.seealso: PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Default to ASCII stdout on the object's communicator when no viewer is given */
  if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
  /* Dispatch to the type-specific view routine, if any */
  if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
455 
456 #undef __FUNCT__
457 #define __FUNCT__ "PetscPartitionerSetTypeFromOptions_Internal"
458 PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
459 {
460   const char    *defaultType;
461   char           name[256];
462   PetscBool      flg;
463   PetscErrorCode ierr;
464 
465   PetscFunctionBegin;
466   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
467   if (!((PetscObject) part)->type_name)
468 #if defined(PETSC_HAVE_CHACO)
469     defaultType = PETSCPARTITIONERCHACO;
470 #elif defined(PETSC_HAVE_PARMETIS)
471     defaultType = PETSCPARTITIONERPARMETIS;
472 #else
473     defaultType = PETSCPARTITIONERSIMPLE;
474 #endif
475   else
476     defaultType = ((PetscObject) part)->type_name;
477   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
478 
479   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
480   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);CHKERRQ(ierr);
481   if (flg) {
482     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
483   } else if (!((PetscObject) part)->type_name) {
484     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
485   }
486   ierr = PetscOptionsEnd();CHKERRQ(ierr);
487   PetscFunctionReturn(0);
488 }
489 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerSetFromOptions"
/*@
  PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to set options for

  Level: developer

.seealso: PetscPartitionerView()
@*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* First resolve the type (possibly from -petscpartitioner_type), then let the
     chosen implementation consume its own options */
  ierr = PetscPartitionerSetTypeFromOptions_Internal(part);CHKERRQ(ierr);

  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  if (part->ops->setfromoptions) {ierr = (*part->ops->setfromoptions)(part);CHKERRQ(ierr);}
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  /* Honor -petscpartitioner_view once options processing is complete */
  ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
520 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerSetUp"
/*@C
  PetscPartitionerSetUp - Construct data structures for the PetscPartitioner

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to setup

  Level: developer

.seealso: PetscPartitionerView(), PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Delegate to the type-specific setup, if the implementation provides one */
  if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
544 
545 #undef __FUNCT__
546 #define __FUNCT__ "PetscPartitionerDestroy"
547 /*@
548   PetscPartitionerDestroy - Destroys a PetscPartitioner object
549 
550   Collective on PetscPartitioner
551 
552   Input Parameter:
553 . part - the PetscPartitioner object to destroy
554 
555   Level: developer
556 
557 .seealso: PetscPartitionerView()
558 @*/
559 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
560 {
561   PetscErrorCode ierr;
562 
563   PetscFunctionBegin;
564   if (!*part) PetscFunctionReturn(0);
565   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
566 
567   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
568   ((PetscObject) (*part))->refct = 0;
569 
570   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
571   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
572   PetscFunctionReturn(0);
573 }
574 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerCreate"
/*@
  PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the PetscPartitioner object

  Output Parameter:
. part - The PetscPartitioner object

  Level: beginner

.seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
@*/
PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
{
  PetscPartitioner p;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidPointer(part, 2);
  *part = NULL;
  /* NOTE(review): the FV package registers PETSCPARTITIONER_CLASSID, which is presumably
     why it is initialized here rather than a partitioner-specific package — confirm */
  ierr = PetscFVInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);

  *part = p;
  PetscFunctionReturn(0);
}
607 
608 #undef __FUNCT__
609 #define __FUNCT__ "PetscPartitionerPartition"
610 /*@
611   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
612 
613   Collective on DM
614 
615   Input Parameters:
616 + part    - The PetscPartitioner
617 - dm      - The mesh DM
618 
619   Output Parameters:
620 + partSection     - The PetscSection giving the division of points by partition
621 - partition       - The list of points by partition
622 
623   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
624 
625   Level: developer
626 
627 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
628 @*/
629 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
630 {
631   PetscMPIInt    size;
632   PetscErrorCode ierr;
633 
634   PetscFunctionBegin;
635   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
636   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
637   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
638   PetscValidPointer(partition, 5);
639   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
640   if (size == 1) {
641     PetscInt *points;
642     PetscInt  cStart, cEnd, c;
643 
644     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
645     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
646     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
647     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
648     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
649     for (c = cStart; c < cEnd; ++c) points[c] = c;
650     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
651     PetscFunctionReturn(0);
652   }
653   if (part->height == 0) {
654     PetscInt  numVertices;
655     PetscInt *start     = NULL;
656     PetscInt *adjacency = NULL;
657 
658     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency);CHKERRQ(ierr);
659     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
660     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
661     ierr = PetscFree(start);CHKERRQ(ierr);
662     ierr = PetscFree(adjacency);CHKERRQ(ierr);
663   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
664   PetscFunctionReturn(0);
665 }
666 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerDestroy_Shell"
/* Release the shell partitioner's stored section and partition IS, then the data struct itself */
PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
680 
681 #undef __FUNCT__
682 #define __FUNCT__ "PetscPartitionerView_Shell_Ascii"
683 PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
684 {
685   PetscViewerFormat format;
686   PetscErrorCode    ierr;
687 
688   PetscFunctionBegin;
689   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
690   ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
691   PetscFunctionReturn(0);
692 }
693 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerView_Shell"
/* Type-specific view routine for the shell partitioner; currently only ASCII viewers are handled */
PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
  if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
708 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerPartition_Shell"
/* Return the user-preconfigured partition (set via PetscPartitionerShellSetPartition())
   after checking it matches the requested partition count and vertex count.
   The returned IS is the stored one with an extra reference; the caller owns that reference. */
PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscInt                np;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  /* The section chart's upper bound is the configured number of partitions */
  ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
  if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
  ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
  if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
  ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
  *partition = p->partition;
  ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
727 
728 #undef __FUNCT__
729 #define __FUNCT__ "PetscPartitionerInitialize_Shell"
730 PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
731 {
732   PetscFunctionBegin;
733   part->ops->view      = PetscPartitionerView_Shell;
734   part->ops->destroy   = PetscPartitionerDestroy_Shell;
735   part->ops->partition = PetscPartitionerPartition_Shell;
736   PetscFunctionReturn(0);
737 }
738 
739 /*MC
740   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
741 
742   Level: intermediate
743 
744 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
745 M*/
746 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerCreate_Shell"
/* Creation routine registered for PETSCPARTITIONERSHELL: allocate the implementation
   data and install the shell function table */
PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
{
  PetscPartitioner_Shell *p;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
  part->data = p;

  ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
762 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerShellSetPartition"
/*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh

  Collective on PART

  Input Parameters:
+ part     - The PetscPartitioner
. numProcs - The number of partitions
. sizes    - array of size numProcs (or NULL) providing the number of points in each partition
- points   - array of size sum(sizes) (may be NULL iff sizes is NULL) providing the partition each point belongs to

  Level: developer

  Notes:
    It is safe to free the sizes and points arrays after use in this routine;
    both are copied into the partitioner's own section and IS.

.seealso DMPlexDistribute(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt numProcs, const PetscInt sizes[], const PetscInt points[])
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscInt                proc, numPoints;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* points must be provided exactly when sizes is, per the contract above */
  if (sizes) {PetscValidPointer(sizes, 3);}
  if (sizes) {PetscValidPointer(points, 4);}
  /* Drop any previously configured partition before installing the new one */
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(p->section, 0, numProcs);CHKERRQ(ierr);
  if (sizes) {
    for (proc = 0; proc < numProcs; ++proc) {
      ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
    }
  }
  ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
  /* With sizes == NULL the storage size is 0 and the partition IS is empty */
  ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
808 
809 #undef __FUNCT__
810 #define __FUNCT__ "PetscPartitionerDestroy_Simple"
811 PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
812 {
813   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
814   PetscErrorCode          ierr;
815 
816   PetscFunctionBegin;
817   ierr = PetscFree(p);CHKERRQ(ierr);
818   PetscFunctionReturn(0);
819 }
820 
821 #undef __FUNCT__
822 #define __FUNCT__ "PetscPartitionerView_Simple_Ascii"
823 PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
824 {
825   PetscViewerFormat format;
826   PetscErrorCode    ierr;
827 
828   PetscFunctionBegin;
829   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
830   ierr = PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");CHKERRQ(ierr);
831   PetscFunctionReturn(0);
832 }
833 
834 #undef __FUNCT__
835 #define __FUNCT__ "PetscPartitionerView_Simple"
836 PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
837 {
838   PetscBool      iascii;
839   PetscErrorCode ierr;
840 
841   PetscFunctionBegin;
842   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
843   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
844   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
845   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
846   PetscFunctionReturn(0);
847 }
848 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerPartition_Simple"
/* Partition by chopping the global vertex numbering into nparts contiguous,
   nearly equal chunks: each partition gets nvGlobal/nparts vertices, and the
   first nvGlobal%nparts partitions get one extra.  Each rank records in
   partSection only the portion of each chunk that overlaps its own vertex
   range; the local point order is left unchanged (identity IS). */
PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)dm);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  /* Local points are not permuted: the partition IS is the identity */
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial: partition np gets numVertices/nparts points, plus one extra when np < numVertices%nparts */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  }
  else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Prefix-sum the local sizes to get this rank's global vertex range [myFirst, myLast] */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      /* Determine the range of partitions [firstPart, lastPart] that overlap this rank */
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      PetscInt rem = nvGlobal % nparts;    /* number of "large" partitions with one extra vertex */
      PetscInt pSmall = nvGlobal/nparts;   /* size of a small partition */
      PetscInt pBig = nvGlobal/nparts + 1; /* size of a large partition */


      if (rem) {
        /* The first rem partitions are large.  Locate myFirst/myLast as if all
           partitions were large, then correct for the small partitions beyond rem. */
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        }
        else {
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        }
        else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      }
      else {
        /* All partitions have identical size nvGlobal/nparts */
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      for (np = firstPart; np <= lastPart; np++) {
        /* Global extent of partition np, clipped to this rank's vertex range */
        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
924 
925 #undef __FUNCT__
926 #define __FUNCT__ "PetscPartitionerInitialize_Simple"
927 PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
928 {
929   PetscFunctionBegin;
930   part->ops->view      = PetscPartitionerView_Simple;
931   part->ops->destroy   = PetscPartitionerDestroy_Simple;
932   part->ops->partition = PetscPartitionerPartition_Simple;
933   PetscFunctionReturn(0);
934 }
935 
936 /*MC
  PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object that splits the global vertex numbering into contiguous, nearly equal blocks
938 
939   Level: intermediate
940 
941 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
942 M*/
943 
944 #undef __FUNCT__
945 #define __FUNCT__ "PetscPartitionerCreate_Simple"
946 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
947 {
948   PetscPartitioner_Simple *p;
949   PetscErrorCode           ierr;
950 
951   PetscFunctionBegin;
952   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
953   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
954   part->data = p;
955 
956   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
957   PetscFunctionReturn(0);
958 }
959 
960 #undef __FUNCT__
961 #define __FUNCT__ "PetscPartitionerDestroy_Gather"
962 PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
963 {
964   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
965   PetscErrorCode          ierr;
966 
967   PetscFunctionBegin;
968   ierr = PetscFree(p);CHKERRQ(ierr);
969   PetscFunctionReturn(0);
970 }
971 
972 #undef __FUNCT__
973 #define __FUNCT__ "PetscPartitionerView_Gather_Ascii"
974 PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
975 {
976   PetscViewerFormat format;
977   PetscErrorCode    ierr;
978 
979   PetscFunctionBegin;
980   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
981   ierr = PetscViewerASCIIPrintf(viewer, "Gather Graph Partitioner:\n");CHKERRQ(ierr);
982   PetscFunctionReturn(0);
983 }
984 
985 #undef __FUNCT__
986 #define __FUNCT__ "PetscPartitionerView_Gather"
987 PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
988 {
989   PetscBool      iascii;
990   PetscErrorCode ierr;
991 
992   PetscFunctionBegin;
993   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
994   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
995   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
996   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
997   PetscFunctionReturn(0);
998 }
999 
1000 #undef __FUNCT__
1001 #define __FUNCT__ "PetscPartitionerPartition_Gather"
1002 PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1003 {
1004   PetscInt       np;
1005   PetscErrorCode ierr;
1006 
1007   PetscFunctionBegin;
1008   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1009   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1010   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1011   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1012   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1013   PetscFunctionReturn(0);
1014 }
1015 
1016 #undef __FUNCT__
1017 #define __FUNCT__ "PetscPartitionerInitialize_Gather"
1018 PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1019 {
1020   PetscFunctionBegin;
1021   part->ops->view      = PetscPartitionerView_Gather;
1022   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1023   part->ops->partition = PetscPartitionerPartition_Gather;
1024   PetscFunctionReturn(0);
1025 }
1026 
1027 /*MC
  PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object that assigns every point to the first partition
1029 
1030   Level: intermediate
1031 
1032 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1033 M*/
1034 
1035 #undef __FUNCT__
1036 #define __FUNCT__ "PetscPartitionerCreate_Gather"
1037 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1038 {
1039   PetscPartitioner_Gather *p;
1040   PetscErrorCode           ierr;
1041 
1042   PetscFunctionBegin;
1043   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1044   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1045   part->data = p;
1046 
1047   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1048   PetscFunctionReturn(0);
1049 }
1050 
1051 
1052 #undef __FUNCT__
1053 #define __FUNCT__ "PetscPartitionerDestroy_Chaco"
1054 PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1055 {
1056   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1057   PetscErrorCode          ierr;
1058 
1059   PetscFunctionBegin;
1060   ierr = PetscFree(p);CHKERRQ(ierr);
1061   PetscFunctionReturn(0);
1062 }
1063 
1064 #undef __FUNCT__
1065 #define __FUNCT__ "PetscPartitionerView_Chaco_Ascii"
1066 PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1067 {
1068   PetscViewerFormat format;
1069   PetscErrorCode    ierr;
1070 
1071   PetscFunctionBegin;
1072   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1073   ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
1074   PetscFunctionReturn(0);
1075 }
1076 
1077 #undef __FUNCT__
1078 #define __FUNCT__ "PetscPartitionerView_Chaco"
1079 PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1080 {
1081   PetscBool      iascii;
1082   PetscErrorCode ierr;
1083 
1084   PetscFunctionBegin;
1085   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1086   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1087   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1088   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1089   PetscFunctionReturn(0);
1090 }
1091 
#if defined(PETSC_HAVE_CHACO)
#if defined(PETSC_HAVE_UNISTD_H)
#include <unistd.h>
#endif
/* Chaco does not have an include file */
PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
                       float *ewgts, float *x, float *y, float *z, char *outassignname,
                       char *outfilename, short *assignment, int architecture, int ndims_tot,
                       int mesh_dims[3], double *goal, int global_method, int local_method,
                       int rqi_flag, int vmax, int ndims, double eigtol, long seed);

/* Chaco global flag: nonzero lets Chaco free the caller's graph arrays */
extern int FREE_GRAPH;
#endif

#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerPartition_Chaco"
/* Partition the CSR graph (start/adjacency) into nparts pieces using the Chaco
   library.  The adjacency array is shifted to 1-based numbering in place for
   Chaco and shifted back before returning. */
PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
  short int     *assignment;              /* Output partition */
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  if (!numVertices) {
    /* No local vertices: produce an empty section and IS, then return early */
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco expects 1-based vertex numbers; shift in place and undo at the end */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  /* NOTE(review): the return value of interface() is only checked on the
     PETSC_HAVE_UNISTD_H path below -- confirm whether the non-UNIX build
     should also check it */
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    fflush(stdout);
    /* NOTE(review): a single read() caps the captured log at 9999 bytes, and
       interface() could block if Chaco writes more than the pipe capacity --
       confirm whether this can occur in practice */
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    /* Restore the original stdout and tear down the pipe */
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* Invert the assignment into lists of points grouped by partition */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Restore the caller's 0-based adjacency numbering */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1211 
1212 #undef __FUNCT__
1213 #define __FUNCT__ "PetscPartitionerInitialize_Chaco"
1214 PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1215 {
1216   PetscFunctionBegin;
1217   part->ops->view      = PetscPartitionerView_Chaco;
1218   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1219   part->ops->partition = PetscPartitionerPartition_Chaco;
1220   PetscFunctionReturn(0);
1221 }
1222 
1223 /*MC
1224   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1225 
1226   Level: intermediate
1227 
1228 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1229 M*/
1230 
1231 #undef __FUNCT__
1232 #define __FUNCT__ "PetscPartitionerCreate_Chaco"
1233 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1234 {
1235   PetscPartitioner_Chaco *p;
1236   PetscErrorCode          ierr;
1237 
1238   PetscFunctionBegin;
1239   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1240   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1241   part->data = p;
1242 
1243   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1244   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1245   PetscFunctionReturn(0);
1246 }
1247 
1248 #undef __FUNCT__
1249 #define __FUNCT__ "PetscPartitionerDestroy_ParMetis"
1250 PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1251 {
1252   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1253   PetscErrorCode             ierr;
1254 
1255   PetscFunctionBegin;
1256   ierr = PetscFree(p);CHKERRQ(ierr);
1257   PetscFunctionReturn(0);
1258 }
1259 
1260 #undef __FUNCT__
1261 #define __FUNCT__ "PetscPartitionerView_ParMetis_Ascii"
1262 PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1263 {
1264   PetscViewerFormat format;
1265   PetscErrorCode    ierr;
1266 
1267   PetscFunctionBegin;
1268   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1269   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
1270   PetscFunctionReturn(0);
1271 }
1272 
1273 #undef __FUNCT__
1274 #define __FUNCT__ "PetscPartitionerView_ParMetis"
1275 PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1276 {
1277   PetscBool      iascii;
1278   PetscErrorCode ierr;
1279 
1280   PetscFunctionBegin;
1281   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1282   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1283   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1284   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1285   PetscFunctionReturn(0);
1286 }
1287 
1288 #if defined(PETSC_HAVE_PARMETIS)
1289 #include <parmetis.h>
1290 #endif
1291 
1292 #undef __FUNCT__
1293 #define __FUNCT__ "PetscPartitionerPartition_ParMetis"
1294 PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1295 {
1296 #if defined(PETSC_HAVE_PARMETIS)
1297   MPI_Comm       comm;
1298   PetscSection   section;
1299   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1300   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1301   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1302   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1303   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1304   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1305   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1306   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1307   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1308   PetscReal     *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1309   PetscReal     *ubvec;                    /* The balance intolerance for vertex weights */
1310   PetscInt       options[5];               /* Options */
1311   /* Outputs */
1312   PetscInt       edgeCut;                  /* The number of edges cut by the partition */
1313   PetscInt      *assignment, *points;
1314   PetscMPIInt    rank, p, v, i;
1315   PetscErrorCode ierr;
1316 
1317   PetscFunctionBegin;
1318   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1319   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1320   options[0] = 0; /* Use all defaults */
1321   /* Calculate vertex distribution */
1322   ierr = PetscMalloc5(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
1323   vtxdist[0] = 0;
1324   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1325   for (p = 2; p <= nparts; ++p) {
1326     vtxdist[p] += vtxdist[p-1];
1327   }
1328   /* Calculate weights */
1329   for (p = 0; p < nparts; ++p) {
1330     tpwgts[p] = 1.0/nparts;
1331   }
1332   ubvec[0] = 1.05;
1333   /* Weight cells by dofs on cell by default */
1334   ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1335   if (section) {
1336     PetscInt cStart, cEnd, dof;
1337 
1338     ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
1339     for (v = cStart; v < cEnd; ++v) {
1340       ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1341       vwgt[v-cStart] = PetscMax(dof, 1);
1342     }
1343   } else {
1344     for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1345   }
1346 
1347   if (nparts == 1) {
1348     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1349   } else {
1350     if (vtxdist[1] == vtxdist[nparts]) {
1351       if (!rank) {
1352         PetscStackPush("METIS_PartGraphKway");
1353         ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
1354         PetscStackPop;
1355         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1356       }
1357     } else {
1358       PetscStackPush("ParMETIS_V3_PartKway");
1359       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1360       PetscStackPop;
1361       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
1362     }
1363   }
1364   /* Convert to PetscSection+IS */
1365   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1366   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1367   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1368   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1369   for (p = 0, i = 0; p < nparts; ++p) {
1370     for (v = 0; v < nvtxs; ++v) {
1371       if (assignment[v] == p) points[i++] = v;
1372     }
1373   }
1374   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1375   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1376   ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
1377   PetscFunctionReturn(0);
1378 #else
1379   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1380 #endif
1381 }
1382 
1383 #undef __FUNCT__
1384 #define __FUNCT__ "PetscPartitionerInitialize_ParMetis"
1385 PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1386 {
1387   PetscFunctionBegin;
1388   part->ops->view      = PetscPartitionerView_ParMetis;
1389   part->ops->destroy   = PetscPartitionerDestroy_ParMetis;
1390   part->ops->partition = PetscPartitionerPartition_ParMetis;
1391   PetscFunctionReturn(0);
1392 }
1393 
1394 /*MC
1395   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1396 
1397   Level: intermediate
1398 
1399 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1400 M*/
1401 
1402 #undef __FUNCT__
1403 #define __FUNCT__ "PetscPartitionerCreate_ParMetis"
1404 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1405 {
1406   PetscPartitioner_ParMetis *p;
1407   PetscErrorCode          ierr;
1408 
1409   PetscFunctionBegin;
1410   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1411   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1412   part->data = p;
1413 
1414   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1415   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1416   PetscFunctionReturn(0);
1417 }
1418 
1419 #undef __FUNCT__
1420 #define __FUNCT__ "DMPlexGetPartitioner"
1421 /*@
1422   DMPlexGetPartitioner - Get the mesh partitioner
1423 
1424   Not collective
1425 
1426   Input Parameter:
1427 . dm - The DM
1428 
1429   Output Parameter:
1430 . part - The PetscPartitioner
1431 
1432   Level: developer
1433 
1434   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1435 
1436 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1437 @*/
1438 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1439 {
1440   DM_Plex *mesh = (DM_Plex *) dm->data;
1441 
1442   PetscFunctionBegin;
1443   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1444   PetscValidPointer(part, 2);
1445   *part = mesh->partitioner;
1446   PetscFunctionReturn(0);
1447 }
1448 
1449 #undef __FUNCT__
1450 #define __FUNCT__ "DMPlexSetPartitioner"
1451 /*@
1452   DMPlexSetPartitioner - Set the mesh partitioner
1453 
1454   logically collective on dm and part
1455 
1456   Input Parameters:
1457 + dm - The DM
1458 - part - The partitioner
1459 
1460   Level: developer
1461 
1462   Note: Any existing PetscPartitioner will be destroyed.
1463 
1464 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1465 @*/
1466 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1467 {
1468   DM_Plex       *mesh = (DM_Plex *) dm->data;
1469   PetscErrorCode ierr;
1470 
1471   PetscFunctionBegin;
1472   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1473   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1474   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1475   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1476   mesh->partitioner = part;
1477   PetscFunctionReturn(0);
1478 }
1479 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelClosure_Tree"
/* Propagate a rank assignment through the mesh tree (non-conforming mesh)
   structure: mark related points of 'point' with 'rank' in 'label'.
   'up' follows the parent chain, 'down' follows the children. */
static PetscErrorCode DMPlexPartitionLabelClosure_Tree(DM dm, DMLabel label, PetscInt rank, PetscInt point, PetscBool up, PetscBool down)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (up) {
    PetscInt parent;

    ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
    if (parent != point) { /* a point with no parent is its own parent */
      PetscInt closureSize, *closure = NULL, i;

      /* Label the parent's whole closure; recurse upward only, so children are
         not revisited and the recursion is bounded by the tree depth */
      ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
      for (i = 0; i < closureSize; i++) {
        PetscInt cpoint = closure[2*i]; /* closure holds (point, orientation) pairs */

        ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
      }
      ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
    }
  }
  if (down) {
    PetscInt numChildren;
    const PetscInt *children;

    ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
    if (numChildren) {
      PetscInt i;

      /* Label each child and recurse downward only */
      for (i = 0; i < numChildren; i++) {
        PetscInt cpoint = children[i];

        ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}
1522 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelClosure"
/*@
  DMPlexPartitionLabelClosure - Add the closure of all points to the partition label

  Input Parameters:
+ dm     - The DM
- label  - DMLabel assigning ranks to remote roots

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, c, closureSize;
  PetscInt       *closure = NULL;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* Loop over every rank value currently present in the label */
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];

    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* Mark the point's whole transitive closure with the same rank;
         closure[] holds (point, orientation) pairs, hence the stride of 2 */
      ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      for (c = 0; c < closureSize*2; c += 2) {
        ierr = DMLabelSetValue(label, closure[c], rank);CHKERRQ(ierr);
        /* Keep tree (non-conforming mesh) parents and children consistent */
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,closure[c],PETSC_TRUE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  /* NOTE(review): the closure scratch array is returned using point 0 --
     presumably any point is acceptable for restoring the work array; confirm */
  if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);}
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1569 
1570 #undef __FUNCT__
1571 #define __FUNCT__ "DMPlexPartitionLabelAdjacency"
1572 /*@
1573   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
1574 
1575   Input Parameters:
1576 + dm     - The DM
1577 - label  - DMLabel assinging ranks to remote roots
1578 
1579   Level: developer
1580 
1581 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1582 @*/
1583 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
1584 {
1585   IS              rankIS,   pointIS;
1586   const PetscInt *ranks,   *points;
1587   PetscInt        numRanks, numPoints, r, p, a, adjSize;
1588   PetscInt       *adj = NULL;
1589   PetscErrorCode  ierr;
1590 
1591   PetscFunctionBegin;
1592   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1593   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1594   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1595   for (r = 0; r < numRanks; ++r) {
1596     const PetscInt rank = ranks[r];
1597 
1598     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1599     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1600     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1601     for (p = 0; p < numPoints; ++p) {
1602       adjSize = PETSC_DETERMINE;
1603       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
1604       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
1605     }
1606     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1607     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1608   }
1609   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1610   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1611   ierr = PetscFree(adj);CHKERRQ(ierr);
1612   PetscFunctionReturn(0);
1613 }
1614 
1615 #undef __FUNCT__
1616 #define __FUNCT__ "DMPlexPartitionLabelPropagate"
1617 /*@
1618   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
1619 
1620   Input Parameters:
1621 + dm     - The DM
1622 - label  - DMLabel assinging ranks to remote roots
1623 
1624   Level: developer
1625 
1626   Note: This is required when generating multi-level overlaps to capture
1627   overlap points from non-neighbouring partitions.
1628 
1629 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1630 @*/
1631 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
1632 {
1633   MPI_Comm        comm;
1634   PetscMPIInt     rank;
1635   PetscSF         sfPoint;
1636   DMLabel         lblRoots, lblLeaves;
1637   IS              rankIS, pointIS;
1638   const PetscInt *ranks;
1639   PetscInt        numRanks, r;
1640   PetscErrorCode  ierr;
1641 
1642   PetscFunctionBegin;
1643   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1644   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1645   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1646   /* Pull point contributions from remote leaves into local roots */
1647   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
1648   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
1649   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1650   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1651   for (r = 0; r < numRanks; ++r) {
1652     const PetscInt remoteRank = ranks[r];
1653     if (remoteRank == rank) continue;
1654     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
1655     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1656     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1657   }
1658   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1659   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1660   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
1661   /* Push point contributions from roots into remote leaves */
1662   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
1663   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
1664   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1665   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1666   for (r = 0; r < numRanks; ++r) {
1667     const PetscInt remoteRank = ranks[r];
1668     if (remoteRank == rank) continue;
1669     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
1670     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1671     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1672   }
1673   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1674   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1675   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
1676   PetscFunctionReturn(0);
1677 }
1678 
1679 #undef __FUNCT__
1680 #define __FUNCT__ "DMPlexPartitionLabelInvert"
1681 /*@
1682   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
1683 
1684   Input Parameters:
1685 + dm        - The DM
1686 . rootLabel - DMLabel assinging ranks to local roots
1687 . processSF - A star forest mapping into the local index on each remote rank
1688 
1689   Output Parameter:
1690 - leafLabel - DMLabel assinging ranks to remote roots
1691 
1692   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
1693   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
1694 
1695   Level: developer
1696 
1697 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1698 @*/
1699 PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
1700 {
1701   MPI_Comm           comm;
1702   PetscMPIInt        rank, numProcs;
1703   PetscInt           p, n, numNeighbors, size, l, nleaves;
1704   PetscSF            sfPoint;
1705   PetscSFNode       *rootPoints, *leafPoints;
1706   PetscSection       rootSection, leafSection;
1707   const PetscSFNode *remote;
1708   const PetscInt    *local, *neighbors;
1709   IS                 valueIS;
1710   PetscErrorCode     ierr;
1711 
1712   PetscFunctionBegin;
1713   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1714   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1715   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1716   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1717 
1718   /* Convert to (point, rank) and use actual owners */
1719   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
1720   ierr = PetscSectionSetChart(rootSection, 0, numProcs);CHKERRQ(ierr);
1721   ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
1722   ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
1723   ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
1724   for (n = 0; n < numNeighbors; ++n) {
1725     PetscInt numPoints;
1726 
1727     ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
1728     ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
1729   }
1730   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
1731   ierr = PetscSectionGetStorageSize(rootSection, &size);CHKERRQ(ierr);
1732   ierr = PetscMalloc1(size, &rootPoints);CHKERRQ(ierr);
1733   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
1734   for (n = 0; n < numNeighbors; ++n) {
1735     IS              pointIS;
1736     const PetscInt *points;
1737     PetscInt        off, numPoints, p;
1738 
1739     ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
1740     ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
1741     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1742     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1743     for (p = 0; p < numPoints; ++p) {
1744       if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
1745       else       {l = -1;}
1746       if (l >= 0) {rootPoints[off+p] = remote[l];}
1747       else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
1748     }
1749     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1750     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1751   }
1752   ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
1753   ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
1754   /* Communicate overlap */
1755   ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
1756   ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
1757   /* Filter remote contributions (ovLeafPoints) into the overlapSF */
1758   ierr = PetscSectionGetStorageSize(leafSection, &size);CHKERRQ(ierr);
1759   for (p = 0; p < size; p++) {
1760     ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
1761   }
1762   ierr = PetscFree(rootPoints);CHKERRQ(ierr);
1763   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
1764   ierr = PetscFree(leafPoints);CHKERRQ(ierr);
1765   ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
1766   PetscFunctionReturn(0);
1767 }
1768 
1769 #undef __FUNCT__
1770 #define __FUNCT__ "DMPlexPartitionLabelCreateSF"
1771 /*@
1772   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
1773 
1774   Input Parameters:
1775 + dm    - The DM
1776 . label - DMLabel assinging ranks to remote roots
1777 
1778   Output Parameter:
1779 - sf    - The star forest communication context encapsulating the defined mapping
1780 
1781   Note: The incoming label is a receiver mapping of remote points to their parent rank.
1782 
1783   Level: developer
1784 
1785 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
1786 @*/
1787 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
1788 {
1789   PetscMPIInt     rank, numProcs;
1790   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
1791   PetscSFNode    *remotePoints;
1792   IS              remoteRootIS;
1793   const PetscInt *remoteRoots;
1794   PetscErrorCode ierr;
1795 
1796   PetscFunctionBegin;
1797   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1798   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
1799 
1800   for (numRemote = 0, n = 0; n < numProcs; ++n) {
1801     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1802     numRemote += numPoints;
1803   }
1804   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
1805   /* Put owned points first */
1806   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
1807   if (numPoints > 0) {
1808     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
1809     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1810     for (p = 0; p < numPoints; p++) {
1811       remotePoints[idx].index = remoteRoots[p];
1812       remotePoints[idx].rank = rank;
1813       idx++;
1814     }
1815     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1816     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1817   }
1818   /* Now add remote points */
1819   for (n = 0; n < numProcs; ++n) {
1820     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1821     if (numPoints <= 0 || n == rank) continue;
1822     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
1823     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1824     for (p = 0; p < numPoints; p++) {
1825       remotePoints[idx].index = remoteRoots[p];
1826       remotePoints[idx].rank = n;
1827       idx++;
1828     }
1829     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1830     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1831   }
1832   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
1833   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1834   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1835   PetscFunctionReturn(0);
1836 }
1837